示例#1
0
  def acquire_obs_header (self, in_dir):
    """Generate the obs.header file for the whole band from sub-bands.

    Merges the per-sub-band obs.header files found beneath in_dir into a
    single obs.header written to in_dir. Returns (0, "") on success or when
    the merged file already exists, and (1, reason) when a required input
    is missing.
    """

    # nothing to do if the merged header already exists
    if os.path.exists (in_dir + "/obs.header"):
      self.log(2, "RepackDaemon::acquire_obs_header obs.header file already existed")
      return (0, "")

    subband_freqs = self.get_subbands (in_dir)

    # guard against an empty sub-band list (get_subbands returns [] when
    # obs.info is missing); previously this raised IndexError below
    if len(subband_freqs) == 0:
      self.log(2, "RepackDaemon::acquire_obs_header no sub-bands found")
      return (1, "no sub-bands were found")

    # start with header file from first sub-band
    if not os.path.exists (in_dir + "/" + subband_freqs[0] + "/obs.header"):
      self.log(2, "RepackDaemon::acquire_obs_header first sub-band obs.header did not exist")
      return (1, "first sub-band header file did not exist")

    self.log (2, "RepackDaemon::acquire_obs_header header_file[0]=" + in_dir + "/" + subband_freqs[0] + "/obs.header")
    header = Config.readCFGFileIntoDict (in_dir + "/" + subband_freqs[0] + "/obs.header")

    # merge the headers from the other sub-bands
    for i in range(1,len(subband_freqs)):
      subband_header_file = in_dir + "/" + subband_freqs[i] + "/obs.header"
      self.log (2, "RepackDaemon::acquire_obs_header header_file[" + str(i)+ "]=" + subband_header_file)
      if os.path.exists (subband_header_file):
        header_sub = Config.readCFGFileIntoDict (subband_header_file)
        header = Config.mergeHeaderFreq (header, header_sub)
      else:
        return (1, "not all sub-band header files present")

    # write the combined header
    self.log (2, "RepackDaemon::acquire_obs_header writing header to " + in_dir + "/" + "obs.header")
    Config.writeDictToCFGFile (header, in_dir + "/" + "obs.header")

    return (0, "")
示例#2
0
  def generateObsInfoDat (self, finished_subdir, completed_subdir):
    """Create the obs_info.dat meta-data file for a completed observation.

    Reads obs.results from the finished directory (computing the results
    when that file is absent) and obs.header from the completed directory,
    then writes the combined key/value pairs to obs_info.dat.

    Returns ("ok", proposal_id) on success — proposal_id is "" when the
    file already existed — or ("fail", reason) when obs.header is missing.
    """

    obs_results_file = self.finished_dir + "/" + finished_subdir + "/obs.results"
    obs_header_file = self.completed_dir + "/" + completed_subdir + "/obs.header"
    obs_info_dat_file = self.completed_dir + "/" + completed_subdir + "/obs_info.dat"

    proposal_id = ""
   
    if not os.path.exists (obs_info_dat_file):

      self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat creating obs_info.dat")

      # prefer the pre-computed results file, falling back to computing them
      if os.path.exists(obs_results_file):
        self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat: reading " + obs_results_file)
        obs_results = Config.readCFGFileIntoDict(obs_results_file)
      else:
        self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat: generating results")
        obs_results = self.computeResults (self.finished_dir + "/" + finished_subdir)

      # the header is required; fail cleanly when it is absent
      if not os.path.exists(obs_header_file):
        self.log (-1, "MeerKATArchiverDaemon::generateObsInfoDat: " + obs_header_file + " did not exist")
        return ("fail", "obs.header file did not exist")
      obs_header = Config.readCFGFileIntoDict(obs_header_file)

      obs_info_dat = {}
  
      obs_info_dat["observer"] = self.extractKey(obs_header ,"OBSERVER")
      obs_info_dat["program_block_id"] = self.extractKey(obs_header, "PROGRAM_BLOCK_ID")
      obs_info_dat["targets"] = "['" + self.extractKey(obs_header,"SOURCE") + "']"
      obs_info_dat["mode"] = self.extractKey(obs_header,"MODE")
      obs_info_dat["sb_id_code"] = self.extractKey(obs_header,"SCHEDULE_BLOCK_ID")
      obs_info_dat["target_duration"] = self.extractKey(obs_results, "length")
      obs_info_dat["target_snr"] = self.extractKey(obs_results, "snr")
      obs_info_dat["proposal_id"] = self.extractKey(obs_header, "PROPOSAL_ID")
      obs_info_dat["description"] = self.extractKey(obs_header, "DESCRIPTION")
      obs_info_dat["backend_args"] = "TBD"
      obs_info_dat["experiment_id"] = self.extractKey(obs_header, "EXPERIMENT_ID")
      obs_info_dat["adc_sync_time"] = self.extractKey(obs_header, "ADC_SYNC_TIME")
      obs_info_dat["precisetime_fraction"] = self.extractKey(obs_header, "PRECISETIME_FRACTION_AVG")
      obs_info_dat["utc_start_offset_picoseconds"] = self.extractKey(obs_header, "PICOSECONDS")

      # NOTE(review): extracted but unused in this method — confirm whether
      # the fold/search mode flags should influence obs_info.dat
      fold_mode = self.extractKey(obs_header, "PERFORM_FOLD")
      search_mode = self.extractKey(obs_header, "PERFORM_SEARCH")

      Config.writeDictToColonSVFile(obs_info_dat, obs_info_dat_file)

      proposal_id = obs_info_dat["proposal_id"]

    else:
      self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat obs_info.dat existed")

    return ("ok", proposal_id)
示例#3
0
  def generateObsJSON(self, finished_subdir, completed_subdir):
    """Create the obs_info.json meta-data file for a completed observation.

    Reads obs.results from the finished directory (computing the results
    when that file is absent) and obs.header from the completed directory,
    then writes a JSON summary to obs_info.json in the completed directory.

    Returns ("ok", "") on success or when the file already exists, and
    ("fail", reason) when obs.header is missing.
    """

    obs_results_file = self.finished_dir + "/" + finished_subdir + "/obs.results"
    obs_header_file = self.completed_dir + "/" + completed_subdir + "/obs.header"
    obs_info_json_file = self.completed_dir + "/" + completed_subdir + "/obs_info.json"

    if not os.path.exists (obs_info_json_file):

      self.debug("creating obs_info.json")

      # prefer the pre-computed results file, falling back to computing them
      if os.path.exists(obs_results_file):
        self.debug("reading " + obs_results_file)
        obs_results = Config.readCFGFileIntoDict(obs_results_file)
      else:
        self.debug("generating results")
        obs_results = self.computeResults (self.finished_dir + "/" + finished_subdir)

      # the header is required; fail cleanly when it is absent
      if not os.path.exists(obs_header_file):
        self.warn(obs_header_file + " did not exist")
        return ("fail", "obs.header file did not exist")
      obs_header = Config.readCFGFileIntoDict(obs_header_file)

      data = {}
      data["ProductType"] = "USEabcProduct"
      data["Description"] = self.extractKey(obs_header, "DESCRIPTION")
      # NOTE(review): "SchedulelBockIdCode" is misspelled, but downstream
      # consumers may expect this exact key — confirm before renaming
      data["SchedulelBockIdCode"] = self.extractKey(obs_header,"SCHEDULE_BLOCK_ID")
      data["ProposalId"] = self.extractKey(obs_header, "PROPOSAL_ID")

      utc_start = self.extractKey(obs_header, "UTC_START")
      data["StartTime"] = times.reformatUTCTime(utc_start, "%Y-%m-%dT%H:%M:%S%Z")
      data["Observer"] = self.extractKey(obs_header ,"OBSERVER")
      data["ProgramBlockId"] = self.extractKey(obs_header, "PROGRAM_BLOCK_ID")
      data["Duration"] = float(self.extractKey(obs_results, "length"))
      data["Bandwidth"] = float(self.extractKey(obs_header, "BW"))
      data["CenterFrequency"] = float(self.extractKey(obs_header, "FREQ"))
      data["NumFreqChannels"] = int(self.extractKey(obs_header, "NCHAN"))
      data["ChannelWidth"] = float(data["CenterFrequency"]) / float(data["NumFreqChannels"])

      # bug fix: previously wrote to an undefined name (data_file), which
      # raised NameError on this path; write to the obs_info.json path
      # computed above, closing the file even on error
      with open(obs_info_json_file, 'w') as fptr:
        fptr.write(json.dumps(data))

    else:
      self.debug("obs_info.json existed")

    return ("ok", "")
示例#4
0
  def generateObsInfoDat (self, finished_subdir, completed_subdir):
    """Create the obs_info.dat meta-data file for a completed observation.

    Reads obs.results from the finished directory (computing the results
    when that file is absent) and obs.header from the completed directory,
    then writes the combined key/value pairs to obs_info.dat.

    Returns ("ok", "") on success or when the file already exists, and
    ("fail", reason) when obs.header is missing.
    """

    finished_path = self.finished_dir + "/" + finished_subdir
    completed_path = self.completed_dir + "/" + completed_subdir

    obs_results_file = finished_path + "/obs.results"
    obs_header_file = completed_path + "/obs.header"
    obs_info_dat_file = completed_path + "/obs_info.dat"

    # nothing to do when a previous pass already wrote the file
    if os.path.exists (obs_info_dat_file):
      self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat obs_info.dat existed")
      return ("ok", "")

    self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat creating obs_info.dat")

    # prefer the pre-computed results file, falling back to computing them
    if os.path.exists(obs_results_file):
      self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat: reading " + obs_results_file)
      obs_results = Config.readCFGFileIntoDict(obs_results_file)
    else:
      self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat: generating results")
      obs_results = self.computeResults (finished_path)

    if not os.path.exists(obs_header_file):
      self.log (-1, "MeerKATArchiverDaemon::generateObsInfoDat: " + obs_header_file + " did not exist")
      return ("fail", "obs.header file did not exist")
    obs_header = Config.readCFGFileIntoDict(obs_header_file)

    # assemble the key/value pairs written to obs_info.dat
    obs_info_dat = {
      "observer": self.extractKey(obs_header, "OBSERVER"),
      "program_block_id": self.extractKey(obs_header, "PROGRAM_BLOCK_ID"),
      "targets": "['" + self.extractKey(obs_header, "SOURCE") + "']",
      "mode": self.extractKey(obs_header, "MODE"),
      "sb_id_code": self.extractKey(obs_header, "SCHEDULE_BLOCK_ID"),
      "target_duration": self.extractKey(obs_results, "length"),
      "target_snr": self.extractKey(obs_results, "snr"),
      "proposal_id": self.extractKey(obs_header, "PROPOSAL_ID"),
      "description": self.extractKey(obs_header, "DESCRIPTION"),
      "backend_args": "TBD",
      "experiment_id": self.extractKey(obs_header, "EXPERIMENT_ID"),
      "adc_sync_time": self.extractKey(obs_header, "ADC_SYNC_TIME"),
      "precisetime_fraction": self.extractKey(obs_header, "PRECISETIME_FRACTION_AVG"),
      "utc_start_offset_picoseconds": self.extractKey(obs_header, "PICOSECONDS"),
    }

    # extracted but not currently used (fold/search mode flags)
    fold_mode = self.extractKey(obs_header, "PERFORM_FOLD")
    search_mode = self.extractKey(obs_header, "PERFORM_SEARCH")

    Config.writeDictToColonSVFile(obs_info_dat, obs_info_dat_file)

    return ("ok", "")
示例#5
0
    def acquire_obs_header(self, in_dir):
        """Build a whole-band obs.header in in_dir from the sub-band headers.

        Returns (0, "") on success or when the merged file already exists,
        and (1, reason) when a required sub-band header is missing.
        """

        merged_file = in_dir + "/obs.header"

        # nothing to do when a previous pass already produced the file
        if os.path.exists(merged_file):
            self.log(
                2,
                "RepackDaemon::acquire_obs_header obs.header file already existed"
            )
            return (0, "")

        subband_freqs = self.get_subbands(in_dir)

        # the first sub-band header seeds the merged header
        first_header = in_dir + "/" + subband_freqs[0] + "/obs.header"
        if not os.path.exists(first_header):
            self.log(
                2,
                "RepackDaemon::acquire_obs_header first sub-band obs.header did not exist"
            )
            return (1, "first sub-band header file did not exist")

        self.log(
            2, "RepackDaemon::acquire_obs_header header_file[0]=" +
            first_header)
        header = Config.readCFGFileIntoDict(first_header)

        # fold each remaining sub-band header into the merged header
        for idx, freq in enumerate(subband_freqs[1:], 1):
            subband_header_file = in_dir + "/" + freq + "/obs.header"
            self.log(
                2, "RepackDaemon::acquire_obs_header header_file[" +
                str(idx) + "]=" + subband_header_file)
            if not os.path.exists(subband_header_file):
                return (1, "not all sub-band header files present")
            header_sub = Config.readCFGFileIntoDict(subband_header_file)
            header = Config.mergeHeaderFreq(header, header_sub)

        # write the combined header
        self.log(
            2, "RepackDaemon::acquire_obs_header writing header to " +
            merged_file)
        Config.writeDictToCFGFile(header, merged_file)

        return (0, "")
示例#6
0
    def generateObsInfoDat(self, finished_subdir, completed_subdir):
        """Create the obs_info.dat meta-data file for a completed observation.

        Reads obs.results from the finished directory (computing the
        results when that file is absent) and obs.header from the
        completed directory, then writes the combined key/value pairs to
        obs_info.dat.

        Returns ("ok", "") on success or when the file already exists,
        and ("fail", reason) when obs.header is missing.
        """

        obs_results_file = self.finished_dir + "/" + finished_subdir + "/obs.results"
        obs_header_file = self.completed_dir + "/" + completed_subdir + "/obs.header"
        obs_info_dat_file = self.completed_dir + "/" + completed_subdir + "/obs_info.dat"

        if not os.path.exists(obs_info_dat_file):

            self.log(
                2,
                "MeerKATArchiverDaemon::generateObsInfoDat creating obs_info.dat"
            )

            # robustness: previously both input files were read
            # unconditionally, so a missing file raised; fall back to
            # computing the results and fail cleanly on a missing header
            if os.path.exists(obs_results_file):
                obs_results = Config.readCFGFileIntoDict(obs_results_file)
            else:
                obs_results = self.computeResults(
                    self.finished_dir + "/" + finished_subdir)

            if not os.path.exists(obs_header_file):
                self.log(
                    -1, "MeerKATArchiverDaemon::generateObsInfoDat: " +
                    obs_header_file + " did not exist")
                return ("fail", "obs.header file did not exist")
            obs_header = Config.readCFGFileIntoDict(obs_header_file)

            obs_info_dat = {}

            obs_info_dat["observer"] = self.extractKey(obs_header, "OBSERVER")
            obs_info_dat["program_block_id"] = self.extractKey(
                obs_header, "PROGRAM_BLOCK_ID")
            obs_info_dat["targets"] = "['" + self.extractKey(
                obs_header, "SOURCE") + "']"
            obs_info_dat["mode"] = self.extractKey(obs_header, "MODE")
            obs_info_dat["sb_id_code"] = self.extractKey(
                obs_header, "SCHEDULE_BLOCK_ID")
            obs_info_dat["target_duration"] = self.extractKey(
                obs_results, "length")
            obs_info_dat["target_snr"] = self.extractKey(obs_results, "snr")
            obs_info_dat["proposal_id"] = self.extractKey(
                obs_header, "PROPOSAL_ID")
            obs_info_dat["description"] = self.extractKey(
                obs_header, "DESCRIPTION")
            obs_info_dat["backend_args"] = "TBD"
            obs_info_dat["experiment_id"] = self.extractKey(
                obs_header, "EXPERIMENT_ID")

            Config.writeDictToColonSVFile(obs_info_dat, obs_info_dat_file)

        else:
            self.log(
                2,
                "MeerKATArchiverDaemon::generateObsInfoDat obs_info.dat existed"
            )

        return ("ok", "")
示例#7
0
  def get_subbands (self, obs_dir):
    """Return the list of sub-band centre frequencies listed in obs.info.

    Returns an empty list when obs.info does not exist.
    """

    subband_freqs = []

    # the sub-bands are enumerated in the obs.info file
    obs_info_file = obs_dir + "/obs.info"
    if not os.path.exists (obs_info_file):
      self.log(0, "RepackDaemon::get_subbands " + obs_dir + "/obs.info file did not exist")
      return subband_freqs

    info = Config.readCFGFileIntoDict (obs_info_file)
    for stream in range(int(info["NUM_STREAM"])):
      # each SUBBAND_<i> entry is "freq:bw:beam"; keep the frequency
      (freq, bw, beam) = info["SUBBAND_" + str(stream)].split(":")
      subband_freqs.append(freq)
    self.log(2, "RepackDaemon::get_subbands subband_freqs=" + str(subband_freqs))
    return subband_freqs
示例#8
0
  def patch_psrfits_header (self, input_dir, input_file):
    """Patch observation meta-data from obs.header into a PSRFITS file.

    Reads input_dir/obs.header, maps its fields (plus some hard-coded
    constants) to psredit key=value pairs and applies them to input_file
    with a single psredit invocation.

    Returns (0, "") on success, or (rval, first line of psredit output)
    when the command fails.
    """

    header_file = input_dir + "/obs.header"
    self.log(3, "patch_psrfits_header: header_file="+header_file)

    header = Config.readCFGFileIntoDict (input_dir + "/obs.header")

    new = {}
    new["obs:observer"] = header["OBSERVER"] 
    new["obs:projid"]   = header["PID"]
    new["be:nrcvr"]     = "2"
    new["be:phase"]     = header["BACKEND_PHASE"] # Phase convention of backend

    # need to know what these mean!
    new["rcvr:hand"]    = header["RCVR_HAND"] # handedness of the receiver
    new["rcvr:sa"]      = "0"     # Advised by D.Manchester
    new["be:tcycle"]    = "8"     # Correlator cycle time
    new["be:dcc"]       = "1"
    new["sub:nsblk"]    = "1"     # Samples/row (SEARCH mode, else 1)
  
    # this needs to come from CAM, hack for now
    new["ext:trk_mode"] = "TRACK" # Tracking mode
    new["ext:bpa"]      = "0" # Beam position angle [?]
    new["ext:bmaj"]     = "0" # Beam major axis [degrees]
    new["ext:bmin"]     = "0" # Beam minor axis [degrees]

    # 31-10-2018 Dick Manchester requested that these parameters be set to 
    # the values output by the PFB FPGAs, not what DSPSR sees in the header
    # NOTE(review): assumes self.cfg["SUBBAND_CONFIG_<id>"] is formatted
    # "freq:bw:nchan" — confirm against the stream configuration
    (freq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + str(self.id)].split(":")
    new["ext:obsfreq"]  = freq
    new["ext:obsbw"]    = bw
    new["ext:obsnchan"] = nchan

    new["ext:stp_crd1"] = header["RA"]
    new["ext:stp_crd2"] = header["DEC"]
    # slices assume UTC_START is date (10 chars), separator, time (8 chars)
    new["ext:stt_date"] = header["UTC_START"][0:10]
    new["ext:stt_time"] = header["UTC_START"][11:19]

    # create the psredit command necessary to apply "new"
    cmd = "psredit -m -c " + ",".join(['%s=%s' % (key, value) for (key, value) in new.items()]) + " " + input_file
    rval, lines = self.system(cmd, 2)
    if rval:
      return rval, lines[0]
    return 0, ""
示例#9
0
    def get_subbands(self, obs_dir):
        """Return the list of sub-band centre frequencies from obs.info.

        Reads obs_dir/obs.info and collects the frequency field of each
        SUBBAND_<i> entry (formatted "freq:bw:beam"). Returns an empty
        list when obs.info does not exist.
        """

        subband_freqs = []

        # read obs.info file to find the subbands
        if not os.path.exists(obs_dir + "/obs.info"):
            self.log(
                0, "RepackDaemon::get_subbands " + obs_dir +
                "/obs.info file did not exist")
            return subband_freqs

        info = Config.readCFGFileIntoDict(obs_dir + "/obs.info")
        num_streams = info["NUM_STREAM"]
        for i in range(int(num_streams)):
            # each entry is "freq:bw:beam"; only the frequency is kept
            (freq, bw, beam) = info["SUBBAND_" + str(i)].split(":")
            subband_freqs.append(freq)
        self.log(
            2,
            "RepackDaemon::get_subbands subband_freqs=" + str(subband_freqs))
        return subband_freqs
示例#10
0
  def get_out_cfreq (self, obs_dir):
    """Compute the centre frequency of the whole band from obs.info.

    Returns (True, cfreq) on success, or (False, 0) when obs.info does
    not exist.
    """

    obs_info_file = obs_dir + "/obs.info"
    if not os.path.exists (obs_info_file):
      self.log(0, "RepackDaemon::get_out_cfreq obs.info file did not exist")
      return (False, 0)

    info = Config.readCFGFileIntoDict (obs_info_file)

    # track the span [freq_low, freq_high] covered by all sub-bands,
    # starting from extreme initial bounds
    freq_low  = float(1e12)
    freq_high = float(-1e12)
    for stream in range(int(info["NUM_STREAM"])):
      (freq, bw, beam) = info["SUBBAND_" + str(stream)].split(":")
      half_bw = float(bw) / 2.0
      freq_low  = min (freq_low, float(freq) - half_bw)
      freq_high = max (freq_high, float(freq) + half_bw)

    # centre of the overall span, truncated to an integer
    cfreq = int(freq_low + ((freq_high - freq_low) / 2.0))
    self.log(2, "RepackDaemon::get_out_cfreq low=" + str(freq_low) + " high=" + str(freq_high) + " cfreq=" + str(cfreq))
    return (True, cfreq)
示例#11
0
  def patch_psrfits_header (self, input_dir, input_file):
    """Patch observation meta-data from obs.header into a PSRFITS file.

    Reads input_dir/obs.header, maps its fields (plus some hard-coded
    constants) to psredit key=value pairs and applies them to input_file
    with a single psredit invocation.

    Returns (0, "") on success, or (rval, first line of psredit output)
    when the command fails.
    """

    header_file = input_dir + "/obs.header"
    self.log(3, "patch_psrfits_header: header_file="+header_file)

    header = Config.readCFGFileIntoDict (input_dir + "/obs.header")

    utc_start = header["UTC_START"]

    new = {
      "obs:observer": header["OBSERVER"],
      "obs:projid":   header["PID"],

      # constants that currently do not flow through CAM
      "be:nrcvr":     "2",

      # need to know what these mean!
      "be:phase":     "+1",     # Phase convention of backend
      "be:tcycle":    "8",      # Correlator cycle time
      "be:dcc":       "0",      # Downconversion conjugation corrected
      "sub:nsblk":    "1",      # Samples/row (SEARCH mode, else 1)

      # this needs to come from CAM, hack for now
      "ext:trk_mode": "TRACK",  # Tracking mode
      "ext:bpa":      "0",      # Beam position angle [?]
      "ext:bmaj":     "0",      # Beam major axis [degrees]
      "ext:bmin":     "0",      # Beam minor axis [degrees]

      "ext:obsfreq":  header["FREQ"],
      "ext:obsbw":    header["BW"],
      "ext:obsnchan": header["NCHAN"],

      "ext:stp_crd1": header["RA"],
      "ext:stp_crd2": header["DEC"],
      # slices assume UTC_START is date (10 chars), separator, time (8 chars)
      "ext:stt_date": utc_start[0:10],
      "ext:stt_time": utc_start[11:19],
    }

    # apply every key=value pair with a single psredit invocation
    edits = ",".join(['%s=%s' % (key, value) for (key, value) in new.items()])
    cmd = "psredit -m -c " + edits + " " + input_file
    rval, lines = self.system(cmd, 2)
    if rval:
      return rval, lines[0]
    return 0, ""
示例#12
0
    def get_out_cfreq(self, obs_dir):
        """Compute the centre frequency of the whole band from obs.info.

        Each SUBBAND_<i> entry ("freq:bw:beam") contributes the range
        [freq - bw/2, freq + bw/2]; the centre of the overall span is
        returned truncated to an int. Returns (True, cfreq) on success,
        or (False, 0) when obs.info does not exist.
        """

        # read obs.info file to find the subbands
        if not os.path.exists(obs_dir + "/obs.info"):
            self.log(
                0, "RepackDaemon::get_out_cfreq obs.info file did not exist")
            return (False, 0)

        info = Config.readCFGFileIntoDict(obs_dir + "/obs.info")
        num_streams = info["NUM_STREAM"]
        # extreme initial bounds, narrowed by each sub-band below
        freq_low = float(1e12)
        freq_high = float(-1e12)

        for i in range(int(num_streams)):
            (freq, bw, beam) = info["SUBBAND_" + str(i)].split(":")
            freq_low = min(freq_low, float(freq) - (float(bw) / 2.0))
            freq_high = max(freq_high, float(freq) + (float(bw) / 2.0))

        cfreq = int(freq_low + ((freq_high - freq_low) / 2.0))
        self.log(
            2, "RepackDaemon::get_out_cfreq low=" + str(freq_low) + " high=" +
            str(freq_high) + " cfreq=" + str(cfreq))
        return (True, cfreq)
示例#13
0
  def collect_data(self, dir, beam, utc_start, source):
    """Collect header meta-data, diagnostic plots and results for an observation.

    Populates self.results[utc_start][source] with values from the obs.header
    file found in dir, PNG renderings of the time/freq/flux/bandpass plots
    (generated with psrplot when not already cached on disk) and the snr and
    length results (computed with psrstat and cached to obs.results when
    absent).

    Returns ("ok", "collected") on success, ("fail", data) when the header
    file cannot be located, or (rval, reason) when a plot or result command
    fails.

    NOTE: the parameter name 'dir' shadows the builtin, but is kept for
    interface compatibility.
    """

    data = self.results[utc_start][source]

    data["beam"] = beam
    data["utc_start"] = utc_start
    data["source"] = source
    data["index"] = self.source_number
    self.source_number += 1

    # find the header filename
    cmd = "find " + dir + " -mindepth 1 -maxdepth 1 -type f -name 'obs.header*' | head -n 1"
    rval, lines = self.system (cmd, 3)
    if rval:
      return ("fail", data)

    header_file = lines[0]
    self.log (3, "collect_data: header_file=" + header_file)

    # read the contents of the header
    header = Config.readCFGFileIntoDict (header_file)

    data["centre_frequency"] = header["FREQ"]
    data["bandwidth"] = header["BW"]
    data["nchannels"] = header["NCHAN"]
    data["ra"] = header["RA"]
    data["dec"] = header["DEC"]
    data["mode"] = header["MODE"]
    data["project_id"] = header["PID"]
    data["subarray_id"] = "N/A"
    data["dir"] = dir
    data["length"] = "-1"
    data["snr"] = "-1"

    # convert entire header into XML
    # bug fix: header.keys() returns a view in Python 3 which has no sort()
    # method; sorted() behaves identically on Python 2 and 3
    data["header"] = ""
    for key in sorted(header.keys()):
      data["header"] += "<" + key + ">" + header[key] + "</" + key + ">"

    psrplot_opts = "-c x:view='(0.0,1.0)' -c y:view='(0.0,1.0)' -g 160x120 -D -/png"

    time_sum_file = dir + "/time.sum"
    # find the path to the archives for plotting
    if os.path.exists(time_sum_file):
      data["time_sum"] = time_sum_file

      data["time_vs_phase"] = {}
      data["time_vs_phase"]["xres"] = 160
      data["time_vs_phase"]["yres"] = 120

      time_plot_file = dir + "/time.png"
      # if the plot does not exist, create it
      if not os.path.exists (time_plot_file):
        cmd = "psrplot -p time " + time_sum_file + " -jDp " + psrplot_opts
        rval, data["time_vs_phase"]["raw"] = self.system_raw (cmd, 3)
        if rval < 0:
          return (rval, "failed to generate time plot")
        # with-statement guarantees the file is closed, even on error
        with open (time_plot_file, "wb") as fptr:
          fptr.write(data["time_vs_phase"]["raw"])

      # read the created plot from the file system
      else:
        rval, data["time_vs_phase"]["raw"] = self.system_raw ("cat " + dir +"/time.png", 3)

    freq_sum_file = dir + "/freq.sum"
    if os.path.exists(freq_sum_file):
      data["freq_sum"] = freq_sum_file

      # generate the freq plot
      data["freq_vs_phase"] = {}
      data["freq_vs_phase"]["xres"] = 160
      data["freq_vs_phase"]["yres"] = 120

      freq_plot_file = dir + "/freq.png"
      if not os.path.exists (freq_plot_file):
        cmd = "psrplot -p freq " + freq_sum_file + " -jDp " + psrplot_opts
        rval, data["freq_vs_phase"]["raw"] = self.system_raw (cmd, 3)
        if rval < 0:
          return (rval, "failed to generate freq.png")
        with open (freq_plot_file, "wb") as fptr:
          fptr.write(data["freq_vs_phase"]["raw"])
      else:
        rval, data["freq_vs_phase"]["raw"] = self.system_raw ("cat " + dir +"/freq.png", 3)

      # generate the flux plot
      data["flux_vs_phase"] = {}
      data["flux_vs_phase"]["xres"] = 160
      data["flux_vs_phase"]["yres"] = 120

      flux_plot_file = dir + "/flux.png"
      if not os.path.exists (flux_plot_file):
        cmd = "psrplot -p flux " + freq_sum_file + " -jFDp " + psrplot_opts
        rval, data["flux_vs_phase"]["raw"] = self.system_raw (cmd, 3)
        if rval < 0:
          return (rval, "failed to create flux plot")
        with open (flux_plot_file, "wb") as fptr:
          fptr.write(data["flux_vs_phase"]["raw"])
      else:
        rval, data["flux_vs_phase"]["raw"] = self.system_raw ("cat " + dir +"/flux.png", 3)

    band_file = dir + "/band.last"
    if os.path.exists(band_file):
      data["band_last"] = band_file

      data["bandpass"] = {}
      data["bandpass"]["xres"] = 160
      data["bandpass"]["yres"] = 120
      band_plot_file = dir + "/band.png"
      if not os.path.exists (band_plot_file):
        cmd = "psrplot -p b " + band_file + " -x -lpol=0,1 -N2,1 " + psrplot_opts
        rval, data["bandpass"]["raw"] = self.system_raw (cmd, 3)
        if rval < 0:
          return (rval, "failed to create band plot")
        with open (band_plot_file, "wb") as fptr:
          fptr.write(data["bandpass"]["raw"])
      else:
        rval, data["bandpass"]["raw"] = self.system_raw ("cat " + band_plot_file, 3)

    # find the results filename
    results_file = dir + "/obs.results"
    if os.path.exists(results_file):
      self.log (3, "collect_data: results_file=" + results_file)
      results = Config.readCFGFileIntoDict (results_file)
      data["snr"] = results["snr"]
      data["length"] = results["length"]
    else:
      # extract the snr from the frequency sum, if present
      if os.path.exists(freq_sum_file):
        cmd = "psrstat -jFDp -c snr " + freq_sum_file + " | awk -F= '{printf(\"%f\",$2)}'"
        rval, lines = self.system (cmd, 3)
        if rval < 0:
          return (rval, "failed to extract snr from freq.sum")
        data["snr"] = lines[0]

      # determine the length of the observation
      if os.path.exists(time_sum_file):
        cmd = "psrstat -c length " + time_sum_file + " | awk -F= '{printf(\"%f\",$2)}'"
        rval, lines = self.system (cmd, 3)
        if rval < 0:
          return (rval, "failed to extract length from time.sum")
        data["length"] = lines[0]

      # cache the computed values so they are not recomputed next time
      with open (results_file, "w") as fptr:
        fptr.write("snr\t" + data["snr"] + "\n")
        fptr.write("length\t" + data["length"] + "\n")

    return ("ok", "collected")
示例#14
0
文件: spip_tcs.py 项目: ajameson/spip
  def load_finished (self):
    """Load the most recently finished observation for each beam into the
    beam state.

    For every beam, finds the newest <utc>/<source> directory beneath the
    beam's finished directory and, when an obs.header file exists there,
    copies its parameters into self.beam_states[b]["config"] while holding
    the beam's lock, finally marking the beam state as Idle.
    """

    # read the most recently finished observations
    for b in self.beam_states.keys():

      # TODO check this for SERVER / BEAM
      beam_dir = self.fold_dir + "/finished/" + b

      cmd = "find " + beam_dir + " -mindepth 2 -maxdepth 2 -type d | sort | tail -n 1"
      rval, observation = self.system (cmd, 3)

      # NOTE(review): returns (rather than continue) when a beam has no
      # finished observation — confirm later beams should be skipped too
      if len(observation) == 0:
        return

      # strip prefix 
      observation = observation[0][(len(beam_dir)+1):]

      self.log (1, "main: " + observation)
      (utc, source) = observation.split("/")

      obs_dir = beam_dir + "/" + utc + "/" + source

      self.log(2, "load_finished: reading configuration for " + b + "/" + utc + "/" + source)

      if os.path.exists (obs_dir + "/obs.header"):
        header = Config.readCFGFileIntoDict(obs_dir + "/obs.header")
        self.beam_states[b]["lock"].acquire()

        # source parameters from the header
        self.beam_states[b]["config"]["source_parameters"]["name"]["#text"] = header["SOURCE"]
        self.beam_states[b]["config"]["source_parameters"]["name"]["@epoch"] = "J2000"

        self.beam_states[b]["config"]["source_parameters"]["ra"]["#text"] = header["RA"]
        self.beam_states[b]["config"]["source_parameters"]["ra"]["@units"] = "hh:mm:ss"
        self.beam_states[b]["config"]["source_parameters"]["dec"]["#text"] = header["DEC"]
        self.beam_states[b]["config"]["source_parameters"]["dec"]["@units"] = "dd:mm:ss"

        # observation parameters from the header
        self.beam_states[b]["config"]["observation_parameters"]["observer"]["#text"] = header["OBSERVER"]
        self.beam_states[b]["config"]["observation_parameters"]["project_id"]["#text"] = header["PID"]
        self.beam_states[b]["config"]["observation_parameters"]["mode"]["#text"] = header["MODE"]
        self.beam_states[b]["config"]["observation_parameters"]["calfreq"]["#text"] = header["CALFREQ"]
        self.beam_states[b]["config"]["observation_parameters"]["tobs"]["#text"] = header["TOBS"]
        self.beam_states[b]["config"]["observation_parameters"]["utc_start"]["#text"] = header["UTC_START"]
        self.beam_states[b]["config"]["observation_parameters"]["utc_stop"]["#text"] = ""

        # blank the calibration fields, then overwrite from the header where
        # the corresponding parameter is present
        self.beam_states[b]["config"]["calibration_parameters"]["signal"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["freq"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["phase"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["duty_cycle"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["epoch"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["tsys_avg_time"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["tsys_freq_resolution"]["#text"] = ""

        self.load_header_param(b, "calibration_parameters", "signal", header, "CAL_SIGNAL")
        self.load_header_param(b, "calibration_parameters", "freq", header, "CAL_FREQ")
        self.load_header_param(b, "calibration_parameters", "phase", header, "CAL_PHASE")
        self.load_header_param(b, "calibration_parameters", "duty_cycle", header, "CAL_DUTY_CYCLE")
        self.load_header_param(b, "calibration_parameters", "epoch", header, "CAL_EPOCH")
        self.load_header_param(b, "calibration_parameters", "tsys_avg_time", header, "TSYS_AVG_TIME")
        self.load_header_param(b, "calibration_parameters", "tsys_freq_resolution", header, "TSYS_FREQ_RES")

        self.beam_states[b]["config"]["stream_configuration"]["nstream"]["#text"] = "0"

        self.beam_states[b]["state"] = "Idle"

        self.beam_states[b]["lock"].release()
示例#15
0
    def load_finished(self):
        """Load the most recently finished observation for each beam into
        the beam state.

        For every beam, finds the newest <utc>/<source> directory beneath
        the beam's finished directory and, when an obs.header file exists
        there, copies its parameters into the beam's configuration while
        holding the beam's lock, finally marking the beam state as Idle.
        """

        # read the most recently finished observations
        for b in self.beam_states.keys():

            # TODO check this for SERVER / BEAM
            beam_dir = self.fold_dir + "/finished/" + b

            cmd = "find " + beam_dir + " -mindepth 2 -maxdepth 2 -type d | sort | tail -n 1"
            rval, observation = self.system(cmd, 3)

            if len(observation) == 0:
                return

            # strip the beam directory prefix, leaving "utc/source"
            observation = observation[0][(len(beam_dir) + 1):]

            self.log(1, "main: " + observation)
            (utc, source) = observation.split("/")

            obs_dir = beam_dir + "/" + utc + "/" + source

            self.log(
                2, "load_finished: reading configuration for " + b + "/" +
                utc + "/" + source)

            if not os.path.exists(obs_dir + "/obs.header"):
                continue

            header = Config.readCFGFileIntoDict(obs_dir + "/obs.header")

            state = self.beam_states[b]
            state["lock"].acquire()

            # local aliases for the deeply nested configuration sections
            config = state["config"]
            src = config["source_parameters"]
            obs = config["observation_parameters"]
            cal = config["calibration_parameters"]

            src["name"]["#text"] = header["SOURCE"]
            src["name"]["@epoch"] = "J2000"

            src["ra"]["#text"] = header["RA"]
            src["ra"]["@units"] = "hh:mm:ss"
            src["dec"]["#text"] = header["DEC"]
            src["dec"]["@units"] = "dd:mm:ss"

            obs["observer"]["#text"] = header["OBSERVER"]
            obs["project_id"]["#text"] = header["PID"]
            obs["mode"]["#text"] = header["MODE"]
            obs["calfreq"]["#text"] = header["CALFREQ"]
            obs["tobs"]["#text"] = header["TOBS"]
            obs["utc_start"]["#text"] = header["UTC_START"]
            obs["utc_stop"]["#text"] = ""

            # blank the calibration fields, then overwrite from the header
            # where the corresponding parameter is present
            for field in ["signal", "freq", "phase", "duty_cycle", "epoch",
                          "tsys_avg_time", "tsys_freq_resolution"]:
                cal[field]["#text"] = ""

            self.load_header_param(b, "calibration_parameters", "signal",
                                   header, "CAL_SIGNAL")
            self.load_header_param(b, "calibration_parameters", "freq",
                                   header, "CAL_FREQ")
            self.load_header_param(b, "calibration_parameters", "phase",
                                   header, "CAL_PHASE")
            self.load_header_param(b, "calibration_parameters",
                                   "duty_cycle", header, "CAL_DUTY_CYCLE")
            self.load_header_param(b, "calibration_parameters", "epoch",
                                   header, "CAL_EPOCH")
            self.load_header_param(b, "calibration_parameters",
                                   "tsys_avg_time", header,
                                   "TSYS_AVG_TIME")
            self.load_header_param(b, "calibration_parameters",
                                   "tsys_freq_resolution", header,
                                   "TSYS_FREQ_RES")

            config["stream_configuration"]["nstream"]["#text"] = "0"

            state["state"] = "Idle"

            state["lock"].release()
示例#16
0
    def patch_psrfits_header(self, input_dir, input_file):
        """Patch observation metadata from obs.header into a PSRFITS archive.

        Reads the obs.header file found in input_dir and applies the
        relevant parameters to input_file in place using psredit.

        Args:
            input_dir: directory containing the obs.header file.
            input_file: path to the PSRFITS archive to modify in place.

        Returns:
            (0, "") on success, or (rval, first_output_line) if the
            psredit command failed.
        """

        header_file = input_dir + "/obs.header"
        self.log(3, "patch_psrfits_header: header_file=" + header_file)

        # reuse the already-built path instead of re-concatenating it
        header = Config.readCFGFileIntoDict(header_file)

        new = {}
        new["obs:observer"] = header["OBSERVER"]
        new["obs:projid"] = header["PID"]

        new["be:nrcvr"] = header["NPOL"]

        # the following header parameters are optional: their absence is
        # logged but is not fatal
        try:
            new["rcvr:hand"] = header["RCVR_HAND"]
        except KeyError:
            self.log(2, "patch_psrfits_header: RCVR_HAND not set in header")

        try:
            # Phase convention of backend
            new["be:phase"] = header["BACKEND_PHASE"]
        except KeyError:
            self.log(2,
                     "patch_psrfits_header: BACKEND_PHASE not set in header")

        try:
            # Correlator cycle time
            new["be:tcycle"] = header["FOLD_OUTTSUBINT"]
        except KeyError:
            self.log(
                2, "patch_psrfits_header: FOLD_OUTTSUBINT not set in header")

        new["be:dcc"] = "0"  # Downconversion conjugation corrected
        new["sub:nsblk"] = "1"  # Samples/row (SEARCH mode, else 1)

        # this needs to come from CAM, hack for now
        new["ext:trk_mode"] = "TRACK"  # Tracking mode
        new["ext:bpa"] = "0"  # Beam position angle [?]
        new["ext:bmaj"] = "0"  # Beam major axis [degrees]
        new["ext:bmin"] = "0"  # Beam minor axis [degrees]

        self.log(
            3,
            "RepackDaemon::patch_psrfits_header freq=" + str(header["FREQ"]))

        new["ext:obsfreq"] = header["FREQ"]
        new["ext:obsbw"] = header["BW"]
        new["ext:obsnchan"] = header["NCHAN"]

        new["ext:stt_crd1"] = header["RA"]
        new["ext:stt_crd2"] = header["DEC"]
        new["ext:stp_crd1"] = header["RA"]
        new["ext:stp_crd2"] = header["DEC"]
        # assumes UTC_START is a fixed-width timestamp with the date in
        # chars [0:10] and the time in chars [11:19] -- TODO confirm format
        new["ext:stt_date"] = header["UTC_START"][0:10]
        new["ext:stt_time"] = header["UTC_START"][11:19]

        # build psredit command, in-place modification
        cmd = "psredit -m"

        try:
            # ITRF antenna coordinates arrive as a comma-separated triple
            (x, y, z) = header["ITRF"].split(",")
            new["itrf:ant_x"] = x
            new["itrf:ant_y"] = y
            new["itrf:ant_z"] = z
            cmd = cmd + " -a itrf"

        except KeyError:
            self.log(2, "patch_psrfits_header: ITRF not set in header")

        # create the psredit command necessary to apply "new"
        cmd = cmd + " -c " + ",".join(
            ['%s=%s' % (key, value)
             for (key, value) in new.items()]) + " " + input_file
        rval, lines = self.system(cmd, 2)
        if rval:
            return rval, lines[0]
        return 0, ""
示例#17
0
    def collect_data(self, dir, beam, utc_start, source):
        """Collect metadata, results and diagnostic plots for one observation.

        Populates self.results[utc_start][source] with header metadata, an
        XML rendering of the header, and PNG plots (time/freq/flux/bandpass)
        which are generated via psrplot if not already cached on disk.

        Args:
            dir: observation directory containing obs.header, sum files etc.
            beam: beam identifier.
            utc_start: observation start time (UTC string).
            source: source name.

        Returns:
            ("ok", "collected") on success; ("fail", data) when the header
            file cannot be located; (rval, message) when plot or statistic
            generation fails.
        """

        data = self.results[utc_start][source]

        data["beam"] = beam
        data["utc_start"] = utc_start
        data["source"] = source
        data["index"] = self.source_number
        self.source_number += 1

        # find the header filename
        cmd = "find " + dir + " -mindepth 1 -maxdepth 1 -type f -name 'obs.header*' | head -n 1"
        rval, lines = self.system(cmd, 3)
        if rval:
            return ("fail", data)

        # exactly one header file is expected
        if len(lines) != 1:
            return ("fail", data)
        header_file = lines[0]

        self.log(3, "collect_data: header_file=" + header_file)

        # read the contents of the header
        header = Config.readCFGFileIntoDict(header_file)

        data["centre_frequency"] = header["FREQ"]
        data["bandwidth"] = header["BW"]
        data["nchannels"] = header["NCHAN"]
        data["ra"] = header["RA"]
        data["dec"] = header["DEC"]
        data["mode"] = header["MODE"]
        data["project_id"] = header["PID"]
        data["subarray_id"] = "N/A"
        data["dir"] = dir
        data["length"] = "-1"
        data["snr"] = "-1"

        # convert entire header into XML, keys in sorted order
        # (sorted() works on both Python 2 and 3; join avoids quadratic
        # string concatenation)
        data["header"] = "".join(
            "<" + key + ">" + header[key] + "</" + key + ">"
            for key in sorted(header.keys()))

        psrplot_opts = "-c x:view='(0.0,1.0)' -c y:view='(0.0,1.0)' -g 160x120 -D -/png"

        time_sum_file = dir + "/time.sum"
        # find the path to the archives for plotting
        if os.path.exists(time_sum_file):
            data["time_sum"] = time_sum_file

            data["time_vs_phase"] = {}
            data["time_vs_phase"]["xres"] = 160
            data["time_vs_phase"]["yres"] = 120

            time_plot_file = dir + "/time.png"
            # if the plot does not exist, create it and cache it on disk
            if not os.path.exists(time_plot_file):
                cmd = "psrplot -p time " + time_sum_file + " -jDp " + psrplot_opts
                rval, data["time_vs_phase"]["raw"] = self.system_raw(cmd, 3)
                if rval < 0:
                    return (rval, "failed to generate time plot")
                with open(time_plot_file, "wb") as fptr:
                    fptr.write(data["time_vs_phase"]["raw"])

            # read the created plot from the file system
            else:
                rval, data["time_vs_phase"]["raw"] = self.system_raw(
                    "cat " + dir + "/time.png", 3)

        freq_sum_file = dir + "/freq.sum"
        if os.path.exists(freq_sum_file):
            data["freq_sum"] = freq_sum_file

            # generate the freq plot
            data["freq_vs_phase"] = {}
            data["freq_vs_phase"]["xres"] = 160
            data["freq_vs_phase"]["yres"] = 120

            freq_plot_file = dir + "/freq.png"
            if not os.path.exists(freq_plot_file):
                cmd = "psrplot -p freq " + freq_sum_file + " -jDp " + psrplot_opts
                rval, data["freq_vs_phase"]["raw"] = self.system_raw(cmd, 3)
                if rval < 0:
                    return (rval, "failed to generate freq.png")
                with open(freq_plot_file, "wb") as fptr:
                    fptr.write(data["freq_vs_phase"]["raw"])
            else:
                rval, data["freq_vs_phase"]["raw"] = self.system_raw(
                    "cat " + dir + "/freq.png", 3)

            # generate the flux plot
            data["flux_vs_phase"] = {}
            data["flux_vs_phase"]["xres"] = 160
            data["flux_vs_phase"]["yres"] = 120

            flux_plot_file = dir + "/flux.png"
            if not os.path.exists(flux_plot_file):
                cmd = "psrplot -p flux " + freq_sum_file + " -jFDp " + psrplot_opts
                rval, data["flux_vs_phase"]["raw"] = self.system_raw(cmd, 3)
                if rval < 0:
                    return (rval, "failed to create flux plot")
                with open(flux_plot_file, "wb") as fptr:
                    fptr.write(data["flux_vs_phase"]["raw"])
            else:
                rval, data["flux_vs_phase"]["raw"] = self.system_raw(
                    "cat " + dir + "/flux.png", 3)

        band_file = dir + "/band.last"
        if os.path.exists(band_file):
            data["band_last"] = band_file

            data["bandpass"] = {}
            data["bandpass"]["xres"] = 160
            data["bandpass"]["yres"] = 120
            band_plot_file = dir + "/band.png"
            if not os.path.exists(band_plot_file):
                cmd = "psrplot -p b " + band_file + " -x -lpol=0,1 -N2,1 -c log=1 " + psrplot_opts
                rval, data["bandpass"]["raw"] = self.system_raw(cmd, 3)
                if rval < 0:
                    return (rval, "failed to create band plot")
                with open(band_plot_file, "wb") as fptr:
                    fptr.write(data["bandpass"]["raw"])
            else:
                rval, data["bandpass"]["raw"] = self.system_raw(
                    "cat " + band_plot_file, 3)

        # find the results filename
        results_file = dir + "/obs.results"
        if os.path.exists(results_file):
            self.log(3, "collect_data: results_file=" + results_file)
            results = Config.readCFGFileIntoDict(results_file)
            data["snr"] = results["snr"]
            data["length"] = results["length"]
        else:
            # no cached results: derive snr and length with psrstat
            if os.path.exists(freq_sum_file):
                cmd = "psrstat -jFDp -c snr " + freq_sum_file + " | awk -F= '{printf(\"%f\",$2)}'"
                rval, lines = self.system(cmd, 3)
                if rval < 0:
                    return (rval, "failed to extract snr from freq.sum")
                data["snr"] = lines[0]

            # determine the length of the observation
            if os.path.exists(time_sum_file):
                cmd = "psrstat -c length " + time_sum_file + " | awk -F= '{printf(\"%f\",$2)}'"
                rval, lines = self.system(cmd, 3)
                if rval < 0:
                    return (rval, "failed to extract length from time.sum")
                data["length"] = lines[0]

            # write these values to the sum file so they are cached next time
            with open(results_file, "w") as fptr:
                fptr.write("snr\t" + data["snr"] + "\n")
                fptr.write("length\t" + data["length"] + "\n")

        return ("ok", "collected")
示例#18
0
  def main (self):
    """Main processing loop for folded archives.

    Archives are stored in the directory structure
    beam / utc_start / source / cfreq / "fold"; summary data in
    beam / utc_start / source / freq.sum.  Files that are present in all
    sub-bands are processed, then finished/failed observations are moved
    out of the processing tree.  Loops until quit_event is set.
    """

    archives_glob = "*.ar"

    self.log (2, "main: beams=" + str(self.beams))

    # ensure the working directory tree exists
    # (0o755 is the 2.6+/3 spelling of the old 0755 octal literal)
    if not os.path.exists(self.processing_dir):
      os.makedirs(self.processing_dir, 0o755)
    if not os.path.exists(self.finished_dir):
      os.makedirs(self.finished_dir, 0o755)
    if not os.path.exists(self.archived_dir):
      os.makedirs(self.archived_dir, 0o755)

    self.log (2, "main: stream_id=" + str(self.id))

    while (not self.quit_event.isSet()):

      processed_this_loop = 0

      # check each beam for folded archives to process
      for beam in self.beams:

        beam_dir = self.processing_dir + "/" + beam
        self.log (3, "main: beam=" + beam + " beam_dir=" + beam_dir)

        if not os.path.exists(beam_dir):
          os.makedirs(beam_dir, 0o755)

        # get a list of all the recent observations
        cmd = "find " + beam_dir + " -mindepth 2 -maxdepth 2 -type d"
        rval, observations = self.system (cmd, 3)

        # for each observation
        for observation in observations:

          # strip the beam_dir prefix, leaving "utc/source"
          observation = observation[(len(beam_dir)+1):]

          (utc, source) = observation.split("/")

          # the stats sub-directory is not an observation
          if source == "stats":
            continue

          obs_dir = beam_dir + "/" + observation
          out_dir = self.archived_dir + "/" + beam + "/" + utc + "/" + source + "/" + str(self.out_cfreq)

          if not os.path.exists(out_dir):
            os.makedirs(out_dir, 0o755)

          # count, for each archive filename, how many sub-bands contain it;
          # a file can only be processed once every sub-band has produced it
          archives = {}
          for subband in self.subbands:
            self.log (3, "processing subband=" + str(subband))

            cmd = "find " + obs_dir + "/" + subband["cfreq"] + " -mindepth 1 -maxdepth 1 " + \
                  "-type f -name '" + archives_glob + "' -printf '%f\\n'"
            rval, files = self.system (cmd, 3)

            for file in files:
              if not file in archives:
                archives[file] = 0
              archives[file] += 1

          # if a file meets the subband count it is ripe for processing
          # (sorted() replaces the Python-2-only keys()/sort() idiom)
          files = sorted(archives.keys())

          for file in files:

            processed_this_loop += 1

            self.log (1, observation + ": processing " + file)

            if archives[file] == len(self.subbands):
              if len(self.subbands) > 1:
                self.log (2, "main: process_subband()")
                (rval, response) = self.process_subband (obs_dir, out_dir, source, file)
                if rval:
                  self.log (-1, "failed to process sub-bands for " + file + ": " + response)
              else:
                input_file  = obs_dir  + "/" + self.subbands[0]["cfreq"] + "/" + file
                self.log (2, "main: process_archive() "+ input_file)
                (rval, response) = self.process_archive (obs_dir, input_file, out_dir, source)
                if rval:
                  self.log (-1, "failed to process " + file + ": " + response)

          if len(files) > 0:
            # now process the sum files to produce plots etc
            self.log (2, "main: process_observation("+beam+","+utc+","+source+","+obs_dir+")")
            (rval, response) = self.process_observation (beam, utc, source, obs_dir)
            if rval:
              self.log (-1, "failed to process observation: " + response)

          # if the proc has marked this observation as finished
          all_finished = True
          any_failed = False

          # perhaps a file was produced whilst the previous list was being processed,
          # do another pass
          if len(files) > 0:
            all_finished = False

          for subband in self.subbands:
            filename = obs_dir + "/" + subband["cfreq"] + "/obs.finished"
            if os.path.exists(filename):
              # require obs.finished to be at least 10s old to avoid racing
              # with a writer that may still be producing archive files
              if os.path.getmtime(filename) + 10 > time.time():
                all_finished = False
            else:
              all_finished = False
            filename = obs_dir + "/" + subband["cfreq"] + "/obs.failed"
            if os.path.exists(filename):
              any_failed = True

          # the observation has failed, cleanup
          if any_failed:
            self.log (1, observation + ": processing -> failed")
            all_finished = False

            fail_parent_dir = self.failed_dir + "/" + beam + "/" + utc
            if not os.path.exists(fail_parent_dir):
              os.makedirs(fail_parent_dir, 0o755)
            fail_dir = self.failed_dir + "/" + beam + "/" + utc + "/" + source
            self.log (2, "main: fail_observation("+obs_dir+")")
            (rval, response) = self.fail_observation (beam, obs_dir, fail_dir, out_dir)
            if rval:
              self.log (-1, "failed to finalise observation: " + response)

          # The observation has finished, cleanup
          if all_finished:
            self.log (1, observation + ": processing -> finished")

            fin_parent_dir = self.finished_dir + "/" + beam + "/" + utc
            if not os.path.exists(fin_parent_dir):
              os.makedirs(fin_parent_dir, 0o755)

            fin_dir = self.finished_dir + "/" + beam + "/" + utc + "/" + source
            self.log (2, "main: finalise_observation("+obs_dir+")")
            (rval, response) = self.finalise_observation (beam, obs_dir, fin_dir, out_dir)
            if rval:
              self.log (-1, "failed to finalise observation: " + response)
            else:

              # merge the headers from each sub-band
              # (bug fix: was Config.mergerHeaderFreq, a typo for mergeHeaderFreq)
              header = Config.readCFGFileIntoDict (fin_dir + "/" + self.subbands[0]["cfreq"] + "/obs.header")
              for i in range(1,len(self.subbands)):
                header_sub = Config.readCFGFileIntoDict (fin_dir + "/" + self.subbands[i]["cfreq"] + "/obs.header")
                header = Config.mergeHeaderFreq (header, header_sub)
                os.remove (fin_dir + "/" + self.subbands[i]["cfreq"] + "/obs.header")
                os.remove (fin_dir + "/" + self.subbands[i]["cfreq"] + "/obs.finished")
                os.removedirs (fin_dir + "/" + self.subbands[i]["cfreq"])
              os.remove (fin_dir + "/" + self.subbands[0]["cfreq"] + "/obs.header")
              os.remove (fin_dir + "/" + self.subbands[0]["cfreq"] + "/obs.finished")
              os.removedirs (fin_dir + "/" + self.subbands[0]["cfreq"])

              Config.writeDictToCFGFile (header, fin_dir + "/" + "obs.header")
              shutil.copyfile (fin_dir + "/obs.header", out_dir + "/obs.header")


      if processed_this_loop == 0:
        self.log (3, "time.sleep(1)")
        time.sleep(1)
示例#19
0
  def main (self):
    """Transfer finished observations to the remote archive via rsync.

    For each proc type and beam, observations marked with obs.finished are
    rsync'd to the remote module and moved to the sent directory; junk
    observations (PID P999) are moved to the junk directory instead.
    Loops until quit_event is set.
    """

    self.debug("starting main loop")

    while not self.quit_event.isSet():

      # for each directory that has a completed dir
      for proc_type in self.proc_types:

        self.debug("proc_type=" + proc_type)

        # for each configured beam (there is only 1 for UWB)
        for beam in self.beams:

          self.debug("beam=" + beam)

          if self.quit_event.isSet():
            self.info("quit_event true [1]")
            continue

          # the input and output directories
          send_dir = self.send_dirs[proc_type] + "/" + beam
          junk_dir = self.junk_dirs[proc_type] + "/" + beam
          sent_dir = self.sent_dirs[proc_type] + "/" + beam

          if not os.path.exists(send_dir):
            self.warn("send_dir [" + send_dir + "] did not exist")
            os.makedirs(send_dir, 0o755)

          if not os.path.exists(sent_dir):
            self.warn("sent_dir [" + sent_dir + "] did not exist")
            os.makedirs(sent_dir, 0o755)

          if not os.path.exists(junk_dir):
            # bug fix: this message previously said "sent_dir"
            self.warn("junk_dir [" + junk_dir + "] did not exist")
            os.makedirs(junk_dir, 0o755)

          # look for observations that have been completed and have / BEAM / utc / source / CFREQ
          self.debug("looking for obs.finished in " + send_dir + "/<UTC>/<SOURCE>/" + self.cfreq)
          # -mmin +1 skips files modified within the last minute so a
          # writer that is still finishing is not raced
          cmd = "find " + send_dir + " -type f -path '*/" + self.cfreq + "/obs.finished' -mmin +1 | sort"
          rval, fin_files = self.system(cmd, 2)
          if rval:
            self.warn("find command failed: " + fin_files[0])
            sleep(1)
            continue

          self.debug("assessing obs.finished observations")
          # transfer the completed directory to herschel
          for path in fin_files:

            if self.quit_event.isSet():
              self.info("quit_event true [2]")
              continue

            # strip dir prefix
            subpath = path [(len(send_dir)+1):]

            # extract the the beam, utc, source and cfreq
            (utc, source, cfreq, file) = subpath.split("/")
            utc_source = utc + "/" + source

            self.debug("found obs to transfer " + utc_source)

            # finished and completed directories
            completed_subdir = utc_source + "/" + cfreq

            # determine the size of the data to be transferred
            cmd = "du -sb " + send_dir + "/" + completed_subdir + " | awk '{print $1}'"
            rval, size = self.system(cmd, 2)
            if rval:
              self.warn("failed to determine size of " + completed_subdir)
            else:
              self.debug("transferring " + (str(float(size[0])/1048576)) + " MB")

            # change to the beam directory so the rsync source is relative
            os.chdir (send_dir)

            transfer = True
            # check the header file: P999 observations are junk
            header_file = send_dir + "/" + completed_subdir + "/obs.header"
            if os.path.exists (header_file):
              header = Config.readCFGFileIntoDict (header_file)
              self.debug("utc=" + utc + " source=" + source + " pid=" + header["PID"])
              if header["PID"] == "P999":
                transfer = False

            if transfer:

              self.debug("get_rsync_from_stream (" + str(self.id) + ")")
              (username, server, module) = self.get_rsync_from_stream (self.id)
              # NOTE(review): the username portion of this debug line was
              # scrubbed in the original; reconstructed from the pattern
              self.debug("rsync stream=" + str(self.id) + " user=" + username + " server=" + server + " module=" + module)

              # build the rsync command TODO handle fold/search/etc
              # obs.finished is excluded so it can be sent last, marking
              # the remote copy complete
              cmd = "rsync ./" + completed_subdir + " " + \
                    username + "@" + server + "::" + module + "/" + proc_type + "/ " + \
                    self.rsync_options + " --exclude='obs.finished'"

              # run the rsync command
              transfer_rate = ""
              transfer_success = True
              rval, lines = self.system (cmd, 2)
              if rval:
                transfer_success = False
                self.warn("failed to transfer " + completed_subdir)
                # TODO add support for terminating the transfer early

              else:

                # parse the transfer speed
                for line in lines:
                  if line.find ("bytes/sec") != -1:
                    transfer_rate = line

                # transfer the obs.finished file
                cmd = "rsync ./" + completed_subdir + "/obs.finished " + \
                      username + "@" + server + "::" + \
                      module + "/" + proc_type + "/ " + self.rsync_options

                # run the rsync command
                rval, size = self.system (cmd, 2)
                if rval:
                  transfer_success = False
                  self.warn("failed to transfer " + completed_subdir + "/obs.finished")

                if transfer_success:
                  # create a parent directory in the transferred dir
                  # (EEXIST is expected when a prior cfreq already made it)
                  try:
                    os.makedirs(sent_dir + "/" + utc_source, 0o755)
                  except OSError as e:
                    self.debug(str(e))

                  # now move this observation from send to sent
                  cmd = "mv " + send_dir + "/" + utc_source + "/" + cfreq + " " + sent_dir + "/" + utc_source
                  rval, lines = self.system(cmd, 2)

                  self.clean_utc_source_dir (send_dir + "/" + utc_source)
                  self.info(proc_type + " " + utc_source + "/" + cfreq + " transferred to " + module + ": " + transfer_rate)
                else:
                  self.info(proc_type + " " + utc_source + "/" + cfreq + " failed to transfer")
            else:

              # create a parent directory in the transferred dir
              try:
                os.makedirs(junk_dir + "/" + utc_source, 0o755)
              except OSError as e:
                self.debug(str(e))

              # now move this observation from send to junk
              cmd = "mv " + send_dir + "/" + utc_source + "/" + cfreq + " " + junk_dir + "/" + utc_source + "/"
              rval, lines = self.system(cmd, 2)

              self.clean_utc_source_dir (send_dir + "/" + utc_source)
              self.info(proc_type + " " + utc_source + "/" + cfreq + " junked")
示例#20
0
    def main(self):
        """Transfer finished observations to the remote archive via rsync.

        For each proc type and beam, observations marked with obs.finished
        are rsync'd to the remote module and moved to the sent directory;
        junk observations (PID P999) are moved to the junk directory
        instead.  Loops until quit_event is set.
        """

        self.debug("starting main loop")

        while not self.quit_event.isSet():

            # for each directory that has a completed dir
            for proc_type in self.proc_types:

                self.debug("proc_type=" + proc_type)

                # for each configured beam (there is only 1 for UWB)
                for beam in self.beams:

                    self.debug("beam=" + beam)

                    if self.quit_event.isSet():
                        self.info("quit_event true [1]")
                        continue

                    # the input and output directories
                    send_dir = self.send_dirs[proc_type] + "/" + beam
                    junk_dir = self.junk_dirs[proc_type] + "/" + beam
                    sent_dir = self.sent_dirs[proc_type] + "/" + beam

                    if not os.path.exists(send_dir):
                        self.warn("send_dir [" + send_dir + "] did not exist")
                        os.makedirs(send_dir, 0o755)

                    if not os.path.exists(sent_dir):
                        self.warn("sent_dir [" + sent_dir + "] did not exist")
                        os.makedirs(sent_dir, 0o755)

                    if not os.path.exists(junk_dir):
                        # bug fix: this message previously said "sent_dir"
                        self.warn("junk_dir [" + junk_dir + "] did not exist")
                        os.makedirs(junk_dir, 0o755)

                    # look for observations that have been completed and have / BEAM / utc / source / CFREQ
                    self.debug("looking for obs.finished in " + send_dir +
                               "/<UTC>/<SOURCE>/" + self.cfreq)
                    # -mmin +1 skips files modified within the last minute
                    # so a writer that is still finishing is not raced
                    cmd = "find " + send_dir + " -type f -path '*/" + self.cfreq + "/obs.finished' -mmin +1 | sort"
                    rval, fin_files = self.system(cmd, 2)
                    if rval:
                        self.warn("find command failed: " + fin_files[0])
                        sleep(1)
                        continue

                    self.debug("assessing obs.finished observations")
                    # transfer the completed directory to herschel
                    for path in fin_files:

                        if self.quit_event.isSet():
                            self.info("quit_event true [2]")
                            continue

                        # strip dir prefix
                        subpath = path[(len(send_dir) + 1):]

                        # extract the the beam, utc, source and cfreq
                        (utc, source, cfreq, file) = subpath.split("/")
                        utc_source = utc + "/" + source

                        self.debug("found obs to transfer " + utc_source)

                        # finished and completed directories
                        completed_subdir = utc_source + "/" + cfreq

                        # determine the size of the data to be transferred
                        cmd = "du -sb " + send_dir + "/" + completed_subdir + " | awk '{print $1}'"
                        rval, size = self.system(cmd, 2)
                        if rval:
                            self.warn("failed to determine size of " +
                                      completed_subdir)
                        else:
                            self.debug("transferring " +
                                       (str(float(size[0]) / 1048576)) + " MB")

                        # change to the beam directory so the rsync source
                        # path is relative
                        os.chdir(send_dir)

                        transfer = True
                        # check the header file: P999 observations are junk
                        header_file = send_dir + "/" + completed_subdir + "/obs.header"
                        if os.path.exists(header_file):
                            header = Config.readCFGFileIntoDict(header_file)
                            self.debug("utc=" + utc + " source=" + source +
                                       " pid=" + header["PID"])
                            if header["PID"] == "P999":
                                transfer = False

                        if transfer:

                            self.debug("get_rsync_from_stream (" +
                                       str(self.id) + ")")
                            (username, server,
                             module) = self.get_rsync_from_stream(self.id)
                            # NOTE(review): the username portion of this
                            # debug line was scrubbed in the original;
                            # reconstructed from the pattern
                            self.debug("rsync stream=" + str(self.id) +
                                       " user=" + username + " server=" +
                                       server + " module=" + module)

                            # build the rsync command TODO handle fold/search/etc
                            # obs.finished is excluded so it can be sent
                            # last, marking the remote copy complete
                            cmd = "rsync ./" + completed_subdir + " " + \
                                  username + "@" + server + "::" + module + "/" + proc_type + "/ " + \
                                  self.rsync_options + " --exclude='obs.finished'"

                            # run the rsync command
                            transfer_rate = ""
                            transfer_success = True
                            rval, lines = self.system(cmd, 2)
                            if rval:
                                transfer_success = False
                                self.warn("failed to transfer " +
                                          completed_subdir)
                                # TODO add support for terminating the transfer early

                            else:

                                # parse the transfer speed
                                for line in lines:
                                    if line.find("bytes/sec") != -1:
                                        transfer_rate = line

                                # transfer the obs.finished file
                                cmd = "rsync ./" + completed_subdir + "/obs.finished " + \
                                      username + "@" + server + "::" + \
                                      module + "/" + proc_type + "/ " + self.rsync_options

                                # run the rsync command
                                rval, size = self.system(cmd, 2)
                                if rval:
                                    transfer_success = False
                                    self.warn("failed to transfer " +
                                              completed_subdir +
                                              "/obs.finished")

                                if transfer_success:
                                    # create a parent directory in the
                                    # transferred dir (EEXIST expected when
                                    # a prior cfreq already made it)
                                    try:
                                        os.makedirs(
                                            sent_dir + "/" + utc_source, 0o755)
                                    except OSError as e:
                                        self.debug(str(e))

                                    # now move this observation from send to sent
                                    cmd = "mv " + send_dir + "/" + utc_source + "/" + cfreq + " " + sent_dir + "/" + utc_source
                                    rval, lines = self.system(cmd, 2)

                                    self.clean_utc_source_dir(send_dir + "/" +
                                                              utc_source)
                                    self.info(proc_type + " " + utc_source +
                                              "/" + cfreq +
                                              " transferred to " + module +
                                              ": " + transfer_rate)
                                else:
                                    self.info(proc_type + " " + utc_source +
                                              "/" + cfreq +
                                              " failed to transfer")
                        else:

                            # create a parent directory in the transferred dir
                            try:
                                os.makedirs(junk_dir + "/" + utc_source, 0o755)
                            except OSError as e:
                                self.debug(str(e))

                            # now move this observation from send to junk
                            cmd = "mv " + send_dir + "/" + utc_source + "/" + cfreq + " " + junk_dir + "/" + utc_source + "/"
                            rval, lines = self.system(cmd, 2)

                            self.clean_utc_source_dir(send_dir + "/" +
                                                      utc_source)
                            self.info(proc_type + " " + utc_source + "/" +
                                      cfreq + " junked")
示例#21
0
  def patch_psrfits_header (self, input_dir, input_file):
    """Patch observation metadata into a PSRFITS file using psredit.

    Reads input_dir/obs.header, maps the DADA-style header keys to their
    psredit parameter names and applies them to input_file in place
    (psredit -m).

    Parameters:
      input_dir  -- directory containing the obs.header file
      input_file -- path of the PSRFITS archive to modify in place

    Returns:
      (0, "") on success, or (rval, first_output_line) if psredit fails.

    Raises:
      KeyError if a mandatory key (OBSERVER, PID, NPOL, FREQ, BW, NCHAN,
      RA, DEC, UTC_START) is missing from obs.header.
    """

    header_file = input_dir + "/obs.header"
    self.log(3, "patch_psrfits_header: header_file=" + header_file)

    # reuse the path computed above instead of rebuilding it
    header = Config.readCFGFileIntoDict (header_file)

    # mandatory observation metadata
    new = {}
    new["obs:observer"] = header["OBSERVER"]
    new["obs:projid"]   = header["PID"]
    new["be:nrcvr"]     = header["NPOL"]

    # optional keys: copy when present, log their absence at verbose level
    optional_keys = [
      ("RCVR_HAND",       "rcvr:hand"),  # receiver hand
      ("BACKEND_PHASE",   "be:phase"),   # phase convention of backend
      ("FOLD_OUTTSUBINT", "be:tcycle"),  # correlator cycle time
    ]
    for (header_key, psredit_key) in optional_keys:
      try:
        new[psredit_key] = header[header_key]
      except KeyError:
        self.log(2, "patch_psrfits_header: " + header_key + " not set in header")

    new["be:dcc"]       = "0"     # Downconversion conjugation corrected
    new["sub:nsblk"]    = "1"     # Samples/row (SEARCH mode, else 1)

    # this needs to come from CAM, hack for now
    new["ext:trk_mode"] = "TRACK" # Tracking mode
    new["ext:bpa"]      = "0" # Beam position angle [?]
    new["ext:bmaj"]     = "0" # Beam major axis [degrees]
    new["ext:bmin"]     = "0" # Beam minor axis [degrees]

    self.log(3, "RepackDaemon::patch_psrfits_header freq=" + str(header["FREQ"]))

    new["ext:obsfreq"]  = header["FREQ"]
    new["ext:obsbw"]    = header["BW"]
    new["ext:obsnchan"] = header["NCHAN"]

    new["ext:stt_crd1"] = header["RA"]
    new["ext:stt_crd2"] = header["DEC"]
    new["ext:stp_crd1"] = header["RA"]
    new["ext:stp_crd2"] = header["DEC"]
    # UTC_START assumed "YYYY-MM-DD-HH:MM:SS"-style; slice date and time parts
    new["ext:stt_date"] = header["UTC_START"][0:10]
    new["ext:stt_time"] = header["UTC_START"][11:19]

    # build psredit command, in-place modification 
    cmd = "psredit -m"

    # antenna ITRF coordinates are optional; enable the itrf alias if present
    try:
      itrf = header["ITRF"]
      (x, y, z) = itrf.split(",")
      new["itrf:ant_x"] = x
      new["itrf:ant_y"] = y
      new["itrf:ant_z"] = z
      cmd = cmd + " -a itrf"

    except KeyError:
      self.log(2, "patch_psrfits_header: ITRF not set in header")

    # create the psredit command necessary to apply "new"
    cmd = cmd + " -c " + ",".join(['%s=%s' % (key, value) for (key, value) in new.items()]) + " " + input_file
    rval, lines = self.system(cmd, 2)
    if rval:
      # guard against psredit failing without producing any output
      return rval, lines[0] if lines else ""
    return 0, ""