def acquire_obs_header(self, in_dir):
    """Generate the obs.header file for the whole band from sub-bands.

    Returns a (return_code, message) tuple: (0, "") on success or when the
    combined header already exists, (1, reason) when a sub-band header file
    is missing.
    """
    combined_file = in_dir + "/obs.header"

    # nothing to do when the combined header was written previously
    if os.path.exists(combined_file):
        self.log(2, "RepackDaemon::acquire_obs_header obs.header file already existed")
        return (0, "")

    subband_freqs = self.get_subbands(in_dir)

    # seed the merge with the header of the first sub-band
    first_header_file = in_dir + "/" + subband_freqs[0] + "/obs.header"
    if not os.path.exists(first_header_file):
        self.log(2, "RepackDaemon::acquire_obs_header first sub-band obs.header did not exist")
        return (1, "first sub-band header file did not exist")

    self.log(2, "RepackDaemon::acquire_obs_header header_file[0]=" + first_header_file)
    header = Config.readCFGFileIntoDict(first_header_file)

    # fold the headers from the remaining sub-bands into the first
    for i in range(1, len(subband_freqs)):
        subband_header_file = in_dir + "/" + subband_freqs[i] + "/obs.header"
        self.log(2, "RepackDaemon::acquire_obs_header header_file[" + str(i) + "]=" + subband_header_file)
        if not os.path.exists(subband_header_file):
            return (1, "not all sub-band header files present")
        header_sub = Config.readCFGFileIntoDict(subband_header_file)
        header = Config.mergeHeaderFreq(header, header_sub)

    # persist the merged whole-band header
    self.log(2, "RepackDaemon::acquire_obs_header writing header to " + in_dir + "/" + "obs.header")
    Config.writeDictToCFGFile(header, in_dir + "/" + "obs.header")
    return (0, "")
def acquire_obs_header(self, in_dir):
    """Generate the obs.header file for the whole band from sub-bands."""
    # skip when the combined header has already been written
    if os.path.exists(in_dir + "/obs.header"):
        self.log(2, "RepackDaemon::acquire_obs_header obs.header file already existed")
        return (0, "")

    subband_freqs = self.get_subbands(in_dir)

    # the first sub-band header forms the basis of the merge
    if not os.path.exists(in_dir + "/" + subband_freqs[0] + "/obs.header"):
        self.log(2, "RepackDaemon::acquire_obs_header first sub-band obs.header did not exist")
        return (1, "first sub-band header file did not exist")

    self.log(2, "RepackDaemon::acquire_obs_header header_file[0]=" + in_dir + "/" + subband_freqs[0] + "/obs.header")
    header = Config.readCFGFileIntoDict(in_dir + "/" + subband_freqs[0] + "/obs.header")

    # merge in each remaining sub-band header, failing if any is absent
    idx = 1
    while idx < len(subband_freqs):
        subband_header_file = in_dir + "/" + subband_freqs[idx] + "/obs.header"
        self.log(2, "RepackDaemon::acquire_obs_header header_file[" + str(idx) + "]=" + subband_header_file)
        if not os.path.exists(subband_header_file):
            return (1, "not all sub-band header files present")
        header_sub = Config.readCFGFileIntoDict(subband_header_file)
        header = Config.mergeHeaderFreq(header, header_sub)
        idx += 1

    # write the combined header
    self.log(2, "RepackDaemon::acquire_obs_header writing header to " + in_dir + "/" + "obs.header")
    Config.writeDictToCFGFile(header, in_dir + "/" + "obs.header")
    return (0, "")
def main(self):
    """Run the receive stream: wait for the SMRB, then repeatedly launch
    the configured receive command, logging its output via a LogSocket.

    Bug fix: the failure log after system_piped() was emitted only when
    quit_event WAS set (inverted); a failure is unexpected precisely when
    no shutdown was requested (cf. the RecvDaemon variant of this loop).
    """
    self.log(2, "main: self.waitForSMRB()")
    smrb_exists = self.waitForSMRB()

    if not smrb_exists:
        self.log(-2, "smrb[" + str(self.id) + "] no valid SMRB with " + "key=" + self.db_key)
        self.quit_event.set()
        return

    # configuration file for recv stream
    self.local_config = self.getConfiguration()
    self.local_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
    self.cpu_core = self.cfg["STREAM_RECV_CORE_" + str(self.id)]
    self.ctrl_port = str(int(self.cfg["STREAM_CTRL_PORT"]) + int(self.id))
    self.configured = True
    self.running = False
    env = self.getEnvironment()

    # external control loop to allow for reconfiguration of RECV
    while not self.quit_event.isSet():
        self.log(3, "main: waiting for configuration")
        while not self.quit_event.isSet() and not self.configured:
            sleep(1)
        if self.quit_event.isSet():
            return

        Config.writeDictToCFGFile(self.local_config, self.local_config_file)
        self.log(3, "main: configured")

        cmd = self.getCommand(self.local_config_file)
        self.binary_list.append(cmd)

        self.log(3, "main: sleep(1)")
        sleep(1)

        self.log(3, "main: log_pipe = LogSocket(recv_src)")
        log_pipe = LogSocket("recv_src", "recv_src", str(self.id), "stream",
                             self.cfg["SERVER_HOST"], self.cfg["SERVER_LOG_PORT"],
                             int(DL))
        self.log(3, "main: log_pipe.connect()")
        log_pipe.connect()

        self.log(3, "main: sleep(1)")
        sleep(1)

        self.running = True

        # NOTE(review): CPU core 6 is hard-coded here although self.cpu_core
        # was read from STREAM_RECV_CORE_<id> above — confirm intent
        recv_cmd = "numactl -C 6 -- " + cmd

        # this should be a persistent / blocking command
        rval = self.system_piped(recv_cmd, log_pipe.sock, int(DL), env)

        self.running = False
        self.binary_list = []

        if rval:
            # only an unexpected exit (no shutdown requested) is an error
            if not self.quit_event.isSet():
                self.log(-2, cmd + " failed with return value " + str(rval))

        log_pipe.close()
def main(self):
    """Main loop of a UWB processing daemon.

    Waits for the SMRB, then repeatedly: reads the next observation header
    with dada_header, validates it against the configured sub-band, calls
    the child class prepare() and, when valid, runs self.cmd in a
    UWBProcThread pinned to self.cpu_core.

    Bug fixes: bare ``quit_event.set()`` raised NameError when the proc
    thread failed (now ``self.quit_event.set()``); the skip-processing log
    message said ``MEERKATProcDaemon`` instead of ``UWBProcDaemon``.
    """
    self.log(2, "UWBProcDaemon::main configure_child()")
    self.configure_child()

    self.log(2, "UWBProcDaemon::main wait_for_smrb()")
    SMRBDaemon.waitForSMRB(self.db_key, self)

    if self.quit_event.isSet():
        self.log(-1, "UWBProcDaemon::main quit event was set after waiting for SMRB creation")
        return

    # continuously run the main command waiting on the SMRB
    while not self.quit_event.isSet():

        # wait for the header to determine if folding is required
        cmd = "dada_header -k " + self.db_key + " -t " + self.tag
        self.log(2, "UWBProcDaemon::main " + cmd)
        self.binary_list.append(cmd)
        rval, lines = self.system(cmd, 2, True)
        self.binary_list.remove(cmd)

        # if the command returned ok and we have a header
        if rval != 0:
            time.sleep(0.1)
            if self.quit_event.isSet():
                self.log(2, "UWBProcDaemon::main " + cmd + " failed, but quit_event true")
            else:
                self.log(-2, "UWBProcDaemon::main " + cmd + " failed")
                self.quit_event.set()
        elif len(lines) == 0:
            self.log(-2, "UWBProcDaemon::main header was empty")
            self.quit_event.set()
        else:
            self.log(2, "UWBProcDaemon::main parsing header")
            self.header = Config.parseHeader(lines)

            # account for lower to upper sideband conversion
            if not abs(float(self.bw)) == float(self.header["BW"]):
                self.log(-1, "configured bandwidth [" + self.bw + "] != self.header[" + self.header["BW"] + "]")
            if not float(self.cfreq) == float(self.header["FREQ"]):
                self.log(-1, "configured cfreq [" + self.cfreq + "] != self.header[" + self.header["FREQ"] + "]")
            if not int(self.nchan) == int(self.header["NCHAN"]):
                self.log(-2, "configured nchan [" + self.nchan + "] != self.header[" + self.header["NCHAN"] + "]")

            self.source = self.header["SOURCE"]
            self.utc_start = self.header["UTC_START"]

            # call the child class prepare method
            self.log(2, "UWBProcDaemon::main prepare()")
            valid = self.prepare()

            if valid:
                # ensure the output directory exists
                self.log(2, "UWBProcDaemon::main creating out_dir: " + self.out_dir)
                if not os.path.exists(self.out_dir):
                    os.makedirs(self.out_dir, 0o755)

                # write the sub-bands header to the out_dir
                header_file = self.out_dir + "/obs.header"
                self.log(2, "UWBProcDaemon::main writing obs.header to out_dir")
                Config.writeDictToCFGFile(self.header, header_file)

                # configure the output pipe
                self.log(2, "UWBProcDaemon::main configuring output log pipe")
                log_host = self.cfg["SERVER_HOST"]
                log_port = int(self.cfg["SERVER_LOG_PORT"])
                log_pipe = LogSocket(self.log_prefix, self.log_prefix, str(self.id),
                                     "stream", log_host, log_port, int(DL))
                log_pipe.connect()

                # get any modifications to the environment
                env = self.getEnvironment()

                # add the binary command to the kill list
                self.binary_list.append(self.cmd)

                # create processing threads
                self.log(2, "UWBProcDaemon::main creating processing threads")
                cmd = "numactl -C " + self.cpu_core + " -- " + self.cmd
                proc_thread = UWBProcThread(self, cmd, log_pipe.sock, env, 1)

                # start processing threads
                self.log(2, "UWBProcDaemon::main starting processing thread")
                proc_thread.start()
                self.log(1, "START " + self.cmd)

                # join processing threads
                self.log(2, "UWBProcDaemon::main waiting for proc thread to terminate")
                rval = proc_thread.join()
                self.log(2, "UWBProcDaemon::main proc thread joined")
                self.log(1, "END " + self.cmd)

                # remove the binary command from the list
                self.binary_list.remove(self.cmd)

                if rval:
                    self.log(-2, "UWBProcDaemon::main proc thread failed")
                    self.quit_event.set()

                log_pipe.close()

                # good practise in case the proc thread always fails
                time.sleep(1)
            else:
                self.log(2, "UWBProcDaemon::main skip this processing")
                time.sleep(10)

    self.log(2, "UWBProcDaemon::main processing loop completed")
def prepare(self):
    """Build the dspsr command line for FOLD mode from the observation header.

    Returns False when PERFORM_FOLD is absent or not requested (leaving
    self.cmd as a dada_dbnull sink), True once self.cmd, self.out_dir and
    self.log_prefix have been configured for folding.

    Bug fix: the detection-option test used ``outnstokes >= 1 or
    outnstokes <= 4`` which is always true, making the elif/else branches
    unreachable; the intended test is the inclusive range 1..4.
    """
    self.log(2, "UWBFoldDaemon::prepare UTC_START=" + self.header["UTC_START"])
    self.log(2, "UWBFoldDaemon::prepare RESOLUTION=" + self.header["RESOLUTION"])

    # default processing command simply discards the data block contents
    self.cmd = "dada_dbnull -s -k " + self.db_key

    # check if FOLD mode has been requested in the header
    try:
        fold = (self.header["PERFORM_FOLD"] in ["1", "true"])
    except KeyError as e:
        fold = False

    # if no folding has been requested return
    if not fold:
        return False

    # output directory for FOLD mode
    self.out_dir = self.cfg["CLIENT_FOLD_DIR"] + "/processing/" + self.beam + "/" \
        + self.utc_start + "/" + self.source + "/" + self.cfreq

    # create DSPSR input file for the data block
    db_key_filename = "/tmp/spip_" + self.db_key + ".info"
    if not os.path.exists(db_key_filename):
        db_key_file = open(db_key_filename, "w")
        db_key_file.write("DADA INFO:\n")
        db_key_file.write("key " + self.db_key + "\n")
        db_key_file.close()

    # create DSPSR viewing file for the data block
    view_key_filename = "/tmp/spip_" + self.db_key + ".viewer"
    if not os.path.exists(view_key_filename):
        view_key_file = open(view_key_filename, "w")
        view_key_file.write("DADA INFO:\n")
        view_key_file.write("key " + self.db_key + "\n")
        view_key_file.write("viewer\n")
        view_key_file.close()

    # processing defaults, overridden below from header keys where present
    outnstokes = -1
    outtsubint = -1
    dm = -1
    outnbin = -1
    innchan = int(self.header["NCHAN"])
    outnchan = innchan
    sk = False
    sk_threshold = -1
    sk_nsamps = -1
    mode = "PSR"

    try:
        outnstokes = int(self.header["FOLD_OUTNSTOKES"])
    except:
        outnstokes = 4

    try:
        outtsub = int(self.header["FOLD_OUTTSUBINT"])
    except:
        outtsub = 10

    try:
        outnbin = int(self.header["FOLD_OUTNBIN"])
    except:
        outnbin = 1024

    try:
        outnchan = int(self.header["FOLD_OUTNCHAN"])
    except:
        outnchan = 0
        innchan = 0

    try:
        mode = self.header["MODE"]
    except:
        mode = "PSR"

    try:
        dm = float(self.header["DM"])
    except:
        dm = -1

    try:
        sk = self.header["FOLD_SK"] == "1"
    except:
        sk = False

    try:
        sk_threshold = int(self.header["FOLD_SK_THRESHOLD"])
    except:
        sk_threshold = 3

    try:
        sk_nsamps = int(self.header["FOLD_SK_NSAMPS"])
    except:
        sk_nsamps = 1024

    # configure the command to be run
    self.cmd = "dspsr -Q " + db_key_filename + " -minram 2048 -cuda " + self.gpu_id + " -no_dyn"

    # handle detection options (bug fix: was 'or', which is always true)
    if outnstokes >= 1 and outnstokes <= 4:
        # hack for NPOL==1
        if self.header["NPOL"] == "1":
            self.cmd = self.cmd + " -d 1"
        else:
            self.cmd = self.cmd + " -d " + str(outnstokes)
    elif outnstokes == -1:
        self.log(2, "using stokes IQUV default for DSPSR")
    else:
        self.log(-1, "ignoring invalid outnstokes of " + str(outnstokes))

    # handle channelisation
    if outnchan > innchan:
        if outnchan % innchan == 0:
            if mode == "PSR":
                self.cmd = self.cmd + " -F " + str(outnchan) + ":D"
            else:
                self.cmd = self.cmd + " -F " + str(outnchan) + ":" + str(outnchan*4)
        else:
            self.log(-1, "output channelisation was not a multiple of input channelisation")
    else:
        self.log(-2, "requested output channelisation [" + str(outnchan) + "] " + \
                 "less than input channelisation [" + str(innchan) + "]")

    # handle output binning
    if outnbin > 0:
        self.cmd = self.cmd + " -b " + str(outnbin)

    # subint is required
    self.cmd = self.cmd + " -L " + str(outtsub)
    mintsub = outtsub - 5
    if mintsub > 0:
        self.cmd = self.cmd + " -Lmin " + str(mintsub)

    # if observing a pulsar
    if mode == "PSR":

        # handle a custom DM
        if dm >= 0:
            self.cmd = self.cmd + " -D " + str(dm)

        # if the SK options are active
        if sk:
            self.cmd = self.cmd + " -skz -skz_no_fscr"
            if sk_threshold != -1:
                self.cmd = self.cmd + " -skzs " + str(sk_threshold)
            if sk_nsamps != -1:
                self.cmd = self.cmd + " -skzm " + str(sk_nsamps)

        # if we are trialing the wideband predictor mode
        if self.wideband_predictor:

            # create a copy of the header to modify
            fullband_header = copy.deepcopy(self.header)
            nchan_total = 0
            freq_low = 1e12
            freq_high = -1e12

            # now update the key parameters of the header
            for i in range(int(self.cfg["NUM_STREAM"])):
                (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + str(i)].split(":")
                nchan_total += int(nchan)
                # NOTE(review): named half_chan_bw but holds the full |bw|,
                # so the band edges span cfreq +/- bw — confirm intent
                half_chan_bw = abs(float(bw))
                freq_low_subband = float(cfreq) - half_chan_bw
                freq_high_subband = float(cfreq) + half_chan_bw
                if freq_low_subband < freq_low:
                    freq_low = freq_low_subband
                if freq_high_subband > freq_high:
                    freq_high = freq_high_subband

            bw = (freq_high - freq_low)
            fullband_header["NCHAN"] = str(nchan_total)
            fullband_header["BW"] = str(bw)
            fullband_header["FREQ"] = str(freq_low + bw/2)
            self.info("fullband predictor: NCHAN=" + fullband_header["NCHAN"] +
                      " BW=" + fullband_header["BW"] +
                      " FREQ=" + fullband_header["FREQ"])

            # create the output directory
            if not os.path.exists(self.out_dir):
                os.makedirs(self.out_dir, 0o755)

            # write the sub-bands header to the out_dir
            dummy_file = self.out_dir + "/obs.dummy"
            Config.writeDictToCFGFile(fullband_header, dummy_file, prepend='DUMMY')

            # generate an ephemeris file
            ephemeris_file = self.out_dir + "/pulsar.eph"
            cmd = "psrcat -all -e " + self.header["SOURCE"] + " > " + ephemeris_file
            rval, lines = self.system(cmd, 1)

            # generate the tempo2 predictor
            cmd = "t2pred " + ephemeris_file + " " + dummy_file
            rval, lines = self.system(cmd, 1, False, self.getEnvironment())

            # copy the predictor file to the out_dir
            predictor_file = self.out_dir + "/pulsar.pred"
            cmd = "cp /tmp/tempo2/uwb" + str(self.id) + "/t2pred.dat " + predictor_file
            rval, lines = self.system(cmd, 1)

            # append the ephemeris and predictor to DSPSR command line
            self.cmd = self.cmd + " -E " + ephemeris_file + " -P " + predictor_file

    # set the optimal filterbank kernel length
    self.cmd = self.cmd + " -fft-bench"

    self.log_prefix = "fold_src"

    return True
def main(self):
    """Main loop: wait for the input SMRB, then for each observation header
    launch fold (dspsr) / search (digifil) / transient (heimdall) commands
    as requested by the header, defaulting to dada_dbnull sinks.

    Bug fixes: bare ``quit_event.set()`` raised NameError when the fold
    thread failed (now ``self.quit_event.set()``); the transient test
    compared ``int(...) == "1"`` (int vs str, always False) so the trans
    directory could never be created.
    """
    stream_id = self.id

    # get the data block keys
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    db_id_in = self.cfg["PROCESSING_DATA_BLOCK"]
    db_id_out = self.cfg["SEND_DATA_BLOCK"]
    num_stream = self.cfg["NUM_STREAM"]
    cpu_core = self.cfg["STREAM_PROC_CORE_" + stream_id]

    db_key_in = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, db_id_in)
    db_key_out = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, db_id_out)

    self.log(0, "db_key_in=" + db_key_in + " db_key_out=" + db_key_out)

    # create dspsr input file for the data block
    db_key_filename = "/tmp/spip_" + db_key_in + ".info"
    db_key_file = open(db_key_filename, "w")
    db_key_file.write("DADA INFO:\n")
    db_key_file.write("key " + db_key_in + "\n")
    db_key_file.close()

    gpu_id = self.cfg["GPU_ID_" + str(self.id)]
    prev_utc_start = ""

    (host, beam, subband) = self.cfg["STREAM_" + stream_id].split(":")
    (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + subband].split(":")

    # wait up to 10s for the SMRB to be created
    smrb_wait = 10
    cmd = "dada_dbmetric -k " + db_key_in
    self.binary_list.append(cmd)

    rval = 1
    while rval and smrb_wait > 0 and not self.quit_event.isSet():
        rval, lines = self.system(cmd)
        if rval:
            time.sleep(1)
        smrb_wait -= 1

    if rval:
        self.log(-2, "smrb[" + str(self.id) + "] no valid SMRB with " + "key=" + db_key_in)
        self.quit_event.set()
    else:
        while not self.quit_event.isSet():

            cmd = "dada_header -k " + db_key_in
            self.log(0, cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd)
            self.binary_list.remove(cmd)

            # if the command returned ok and we have a header
            if rval != 0:
                if self.quit_event.isSet():
                    self.log(2, cmd + " failed, but quit_event true")
                else:
                    self.log(-2, cmd + " failed")
                    self.quit_event.set()
            elif len(lines) == 0:
                self.log(-2, "header was empty")
                self.quit_event.set()
            else:
                header = Config.parseHeader(lines)
                utc_start = header["UTC_START"]
                self.log(1, "UTC_START=" + header["UTC_START"])
                self.log(1, "RESOLUTION=" + header["RESOLUTION"])

                # default processing commands discard the data
                fold_cmd = "dada_dbnull -s -k " + db_key_in
                trans_cmd = "dada_dbnull -s -k " + db_key_out
                search_cmd = "dada_dbnull -s -k " + db_key_in

                # NOTE(review): prev_utc_start is never assigned after this
                # comparison, so the repeat guard can never fire — confirm
                if prev_utc_start == utc_start:
                    self.log(-2, "UTC_START [" + utc_start + "] repeated, ignoring observation")
                else:
                    beam = self.cfg["BEAM_" + str(self.beam_id)]

                    if not float(bw) == float(header["BW"]):
                        self.log(-1, "configured bandwidth [" + bw + "] != header[" + header["BW"] + "]")
                    if not float(cfreq) == float(header["FREQ"]):
                        self.log(-1, "configured cfreq [" + cfreq + "] != header[" + header["FREQ"] + "]")
                    if not int(nchan) == int(header["NCHAN"]):
                        self.log(-2, "configured nchan [" + nchan + "] != header[" + header["NCHAN"] + "]")

                    source = header["SOURCE"]

                    # output directories
                    suffix = "/processing/" + beam + "/" + utc_start + "/" + source + "/" + cfreq
                    fold_dir = self.cfg["CLIENT_FOLD_DIR"] + suffix
                    trans_dir = self.cfg["CLIENT_TRANS_DIR"] + suffix
                    search_dir = self.cfg["CLIENT_SEARCH_DIR"] + suffix

                    fold = False
                    search = False
                    trans = False

                    try:
                        fold = (header["PERFORM_FOLD"] == "1")
                        search = (header["PERFORM_SEARCH"] == "1")
                        trans = (header["PERFORM_TRANS"] == "1")
                    except KeyError as e:
                        fold = True
                        search = False
                        trans = False

                    if fold:
                        os.makedirs(fold_dir, 0o755)
                        # earlier experimental variants, kept for reference:
                        # fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -overlap -minram 4000 -x 16384 -b 1024 -L 5 -no_dyn"
                        # fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 512 -b 1024 -L 10 -no_dyn -skz -skzs 4 -skzm 128 -skz_no_tscr -skz_no_fscr"
                        # fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 2048 -b 1024 -Lmin 7 -L 8 -no_dyn"
                        # fold_cmd = "dada_dbdisk -k " + db_key_in + " -s -D " + fold_dir
                        fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -minram 2048 -x 1024 -b 1024 -L 8 -Lmin 7 -no_dyn"
                        header_file = fold_dir + "/obs.header"
                        Config.writeDictToCFGFile(header, header_file)

                    if search or trans:
                        os.makedirs(search_dir, 0o755)
                        search_cmd = "digifil " + db_key_filename + " -c -B 10 -o " + utc_start + ".fil"
                        if trans:
                            search_cmd += " -k " + db_key_out

                    # bug fix: was int(...) == "1" (always False)
                    if trans and int(self.cfg["NUM_SUBBAND"]) == 1:
                        os.makedirs(trans_dir, 0o755)
                        trans_cmd = "heimdall -k " + db_key_out + " -gpu_id 1"

                log_host = self.cfg["SERVER_HOST"]
                log_port = int(self.cfg["SERVER_LOG_PORT"])

                # setup output pipes
                fold_log_pipe = LogSocket("fold_src", "fold_src", str(self.id), "stream",
                                          log_host, log_port, int(DL))
                #trans_log_pipe = LogSocket ("trans_src", "trans_src", str(self.id), "stream",
                #                            log_host, log_port, int(DL))
                #search_log_pipe = LogSocket ("search_src", "search_src", str(self.id), "stream",
                #                             log_host, log_port, int(DL))

                fold_log_pipe.connect()

                self.binary_list.append(fold_cmd)
                #self.binary_list.append (trans_cmd)
                #self.binary_list.append (search_cmd)

                # create processing threads
                self.log(2, "creating processing threads")
                cmd = "numactl -C " + cpu_core + " -- " + fold_cmd
                fold_thread = procThread(cmd, fold_dir, fold_log_pipe.sock, 1)
                #trans_thread = procThread (trans_cmd, self.log_sock.sock, 2)
                #search_thread = procThread (search_cmd, self.log_sock.sock, 2)

                # start processing threads
                self.log(2, "starting processing threads")
                self.log(1, "START " + fold_cmd)
                fold_thread.start()
                #trans_thread.start()
                #search_thread.start()

                # join processing threads
                self.log(2, "waiting for fold thread to terminate")
                rval = fold_thread.join()
                self.log(2, "fold thread joined")
                self.log(1, "END " + fold_cmd)

                # remove the binary command from the list
                self.binary_list.remove(fold_cmd)

                if rval:
                    self.log(-2, "fold thread failed")
                    self.quit_event.set()

                fold_log_pipe.close()
                #trans_log_pipe.close()
                #search_log_pipe.close()

        self.log(1, "processing completed")
def main(self):
    """Main loop of a UWB processing daemon (see class for context).

    Waits for the SMRB, then repeatedly reads the next observation header,
    validates it against the configured sub-band, and when the child class
    prepare() accepts it runs self.cmd in a UWBProcThread.

    Bug fixes: bare ``quit_event.set()`` raised NameError when the proc
    thread failed (now ``self.quit_event.set()``); the skip-processing log
    message said ``MEERKATProcDaemon`` instead of ``UWBProcDaemon``.
    """
    self.log(2, "UWBProcDaemon::main configure_child()")
    self.configure_child()

    self.log(2, "UWBProcDaemon::main wait_for_smrb()")
    SMRBDaemon.waitForSMRB(self.db_key, self)

    if self.quit_event.isSet():
        self.log(-1, "UWBProcDaemon::main quit event was set after waiting for SMRB creation")
        return

    # continuously run the main command waiting on the SMRB
    while not self.quit_event.isSet():

        # wait for the header to determine if folding is required
        cmd = "dada_header -k " + self.db_key + " -t " + self.tag
        self.log(2, "UWBProcDaemon::main " + cmd)
        self.binary_list.append(cmd)
        rval, lines = self.system(cmd, 2, True)
        self.binary_list.remove(cmd)

        # if the command returned ok and we have a header
        if rval != 0:
            time.sleep(0.1)
            if self.quit_event.isSet():
                self.log(2, "UWBProcDaemon::main " + cmd + " failed, but quit_event true")
            else:
                self.log(-2, "UWBProcDaemon::main " + cmd + " failed")
                self.quit_event.set()
        elif len(lines) == 0:
            self.log(-2, "UWBProcDaemon::main header was empty")
            self.quit_event.set()
        else:
            self.log(2, "UWBProcDaemon::main parsing header")
            self.header = Config.parseHeader(lines)

            # account for lower to upper sideband conversion
            if not abs(float(self.bw)) == float(self.header["BW"]):
                self.log(-1, "configured bandwidth [" + self.bw + "] != self.header[" + self.header["BW"] + "]")
            if not float(self.cfreq) == float(self.header["FREQ"]):
                self.log(-1, "configured cfreq [" + self.cfreq + "] != self.header[" + self.header["FREQ"] + "]")
            if not int(self.nchan) == int(self.header["NCHAN"]):
                self.log(-2, "configured nchan [" + self.nchan + "] != self.header[" + self.header["NCHAN"] + "]")

            self.source = self.header["SOURCE"]
            self.utc_start = self.header["UTC_START"]

            # call the child class prepare method
            self.log(2, "UWBProcDaemon::main prepare()")
            valid = self.prepare()

            if valid:
                # ensure the output directory exists
                self.log(2, "UWBProcDaemon::main creating out_dir: " + self.out_dir)
                if not os.path.exists(self.out_dir):
                    os.makedirs(self.out_dir, 0o755)

                # write the sub-bands header to the out_dir
                header_file = self.out_dir + "/obs.header"
                self.log(2, "UWBProcDaemon::main writing obs.header to out_dir")
                Config.writeDictToCFGFile(self.header, header_file)

                # configure the output pipe
                self.log(2, "UWBProcDaemon::main configuring output log pipe")
                log_host = self.cfg["SERVER_HOST"]
                log_port = int(self.cfg["SERVER_LOG_PORT"])
                log_pipe = LogSocket(self.log_prefix, self.log_prefix, str(self.id),
                                     "stream", log_host, log_port, int(DL))
                log_pipe.connect()

                # get any modifications to the environment
                env = self.getEnvironment()

                # add the binary command to the kill list
                self.binary_list.append(self.cmd)

                # create processing threads
                self.log(2, "UWBProcDaemon::main creating processing threads")
                cmd = "numactl -C " + self.cpu_core + " -- " + self.cmd
                proc_thread = UWBProcThread(self, cmd, log_pipe.sock, env, 1)

                # start processing threads
                self.log(2, "UWBProcDaemon::main starting processing thread")
                proc_thread.start()
                self.log(1, "START " + self.cmd)

                # join processing threads
                self.log(2, "UWBProcDaemon::main waiting for proc thread to terminate")
                rval = proc_thread.join()
                self.log(2, "UWBProcDaemon::main proc thread joined")
                self.log(1, "END " + self.cmd)

                # remove the binary command from the list
                self.binary_list.remove(self.cmd)

                if rval:
                    self.log(-2, "UWBProcDaemon::main proc thread failed")
                    self.quit_event.set()

                log_pipe.close()

                # good practise in case the proc thread always fails
                time.sleep(1)
            else:
                self.log(2, "UWBProcDaemon::main skip this processing")
                time.sleep(10)

    self.log(2, "UWBProcDaemon::main processing loop completed")
def main(self):
    """Run the RECV stream: wait for the SMRB, then repeatedly launch the
    receive command under numactl, piping its output to a log socket."""
    self.log(2, "RecvDaemon::main self.waitForSMRB()")
    smrb_exists = self.waitForSMRB()

    if not smrb_exists:
        self.log(-2, "smrb[" + str(self.id) + "] no valid SMRB with " + "key=" + self.db_key)
        self.quit_event.set()
        return

    # configuration file for recv stream
    self.local_config = self.getConfiguration()
    self.local_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
    self.cpu_core = self.cfg["STREAM_RECV_CORE_" + str(self.id)]
    self.ctrl_port = str(int(self.cfg["STREAM_CTRL_PORT"]) + int(self.id))
    self.configured = True
    self.running = False
    environment = self.getEnvironment()

    # outer control loop permits RECV to be reconfigured between runs
    while not self.quit_event.isSet():

        self.log(2, "RecvDaemon::main waiting for configuration")
        while not self.quit_event.isSet() and not self.configured:
            sleep(1)
        if self.quit_event.isSet():
            return

        Config.writeDictToCFGFile(self.local_config, self.local_config_file)
        self.log(2, "RecvDaemon:: configured")

        cmd = self.getCommand(self.local_config_file)
        self.binary_list.append(cmd)

        self.log(3, "RecvDaemon::main sleep(1)")
        sleep(1)

        # connect a socket that relays the command's output to the server
        self.log(2, "RecvDaemon::main log_pipe = LogSocket(recv_src)")
        log_pipe = LogSocket("recv_src", "recv_src", str(self.id), "stream",
                             self.cfg["SERVER_HOST"],
                             self.cfg["SERVER_LOG_PORT"], int(DL))
        self.log(2, "RecvDaemon::main log_pipe.connect()")
        log_pipe.connect()

        self.log(2, "RecvDaemon::main sleep(1)")
        sleep(1)

        self.running = True

        # pin the receiver to its configured core and NUMA node
        self.numa_core = self.cfg["STREAM_RECV_CORE_" + self.id]
        self.numa_node = self.cfg["STREAM_NUMA_" + self.id]
        launch_cmd = "numactl -C " + self.numa_core + " --membind=" + self.numa_node + " -- " + cmd

        self.log(1, "START " + cmd)
        # this should be a persistent / blocking command
        rval = self.system_piped(launch_cmd, log_pipe.sock, 2, environment)
        self.log(1, "END " + cmd)

        self.running = False
        self.binary_list.remove(cmd)

        # an exit is only an error when no shutdown was requested
        if rval and not self.quit_event.isSet():
            self.log(-2, cmd + " failed with return value " + str(rval))

        log_pipe.close()
def main(self):
    """Repack loop: scan each beam's processing directory for folded
    archives, combine sub-bands, and finalise or fail observations based
    on obs.finished / obs.failed marker files.

    Bug fix: the sub-band header merge called ``Config.mergerHeaderFreq``
    (typo) instead of ``Config.mergeHeaderFreq`` (the name used by
    acquire_obs_header), which raised AttributeError at runtime.
    """
    archives_glob = "*.ar"

    self.log(2, "main: beams=" + str(self.beams))

    # archives stored in directory structure
    #   beam / utc_start / source / cfreq / "fold"
    # summary data stored in
    #   beam / utc_start / source / freq.sum

    if not os.path.exists(self.processing_dir):
        os.makedirs(self.processing_dir, 0o755)
    if not os.path.exists(self.finished_dir):
        os.makedirs(self.finished_dir, 0o755)
    if not os.path.exists(self.archived_dir):
        os.makedirs(self.archived_dir, 0o755)

    self.log(2, "main: stream_id=" + str(self.id))

    while not self.quit_event.isSet():

        processed_this_loop = 0

        # check each beam for folded archives to process
        for beam in self.beams:

            beam_dir = self.processing_dir + "/" + beam
            self.log(3, "main: beam=" + beam + " beam_dir=" + beam_dir)

            if not os.path.exists(beam_dir):
                os.makedirs(beam_dir, 0o755)

            # get a list of all the recent observations
            cmd = "find " + beam_dir + " -mindepth 2 -maxdepth 2 -type d"
            rval, observations = self.system(cmd, 3)

            # for each observation
            for observation in observations:

                # strip prefix
                observation = observation[(len(beam_dir)+1):]

                (utc, source) = observation.split("/")
                if source == "stats":
                    continue

                obs_dir = beam_dir + "/" + observation
                out_dir = self.archived_dir + "/" + beam + "/" + utc + "/" + source + "/" + str(self.out_cfreq)

                if not os.path.exists(out_dir):
                    os.makedirs(out_dir, 0o755)

                # count, per archive filename, how many sub-bands contain it
                archives = {}
                for subband in self.subbands:
                    self.log(3, "processing subband=" + str(subband))
                    cmd = "find " + obs_dir + "/" + subband["cfreq"] + " -mindepth 1 -maxdepth 1 " + \
                          "-type f -name '" + archives_glob + "' -printf '%f\\n'"
                    rval, files = self.system(cmd, 3)
                    for file in files:
                        if not file in archives:
                            archives[file] = 0
                        archives[file] += 1

                # if a file meets the subband count it is ripe for processing
                files = sorted(archives.keys())

                for file in files:
                    processed_this_loop += 1
                    self.log(1, observation + ": processing " + file)
                    if archives[file] == len(self.subbands):
                        if len(self.subbands) > 1:
                            self.log(2, "main: process_subband()")
                            (rval, response) = self.process_subband(obs_dir, out_dir, source, file)
                            if rval:
                                self.log(-1, "failed to process sub-bands for " + file + ": " + response)
                        else:
                            input_file = obs_dir + "/" + self.subbands[0]["cfreq"] + "/" + file
                            self.log(2, "main: process_archive() " + input_file)
                            (rval, response) = self.process_archive(obs_dir, input_file, out_dir, source)
                            if rval:
                                self.log(-1, "failed to process " + file + ": " + response)

                if len(files) > 0:
                    # now process the sum files to produce plots etc
                    self.log(2, "main: process_observation(" + beam + "," + utc + "," + source + "," + obs_dir + ")")
                    (rval, response) = self.process_observation(beam, utc, source, obs_dir)
                    if rval:
                        self.log(-1, "failed to process observation: " + response)

                # if the proc has marked this observation as finished
                all_finished = True
                any_failed = False

                # perhaps a file was produced whilst the previous list was
                # being processed, do another pass
                if len(files) > 0:
                    all_finished = False

                for subband in self.subbands:
                    filename = obs_dir + "/" + subband["cfreq"] + "/obs.finished"
                    if os.path.exists(filename):
                        # require the marker to be at least 10s old
                        if os.path.getmtime(filename) + 10 > time.time():
                            all_finished = False
                    else:
                        all_finished = False

                    filename = obs_dir + "/" + subband["cfreq"] + "/obs.failed"
                    if os.path.exists(filename):
                        any_failed = True

                # the observation has failed, cleanup
                if any_failed:
                    self.log(1, observation + ": processing -> failed")
                    all_finished = False
                    fail_parent_dir = self.failed_dir + "/" + beam + "/" + utc
                    if not os.path.exists(fail_parent_dir):
                        os.makedirs(fail_parent_dir, 0o755)
                    fail_dir = self.failed_dir + "/" + beam + "/" + utc + "/" + source
                    self.log(2, "main: fail_observation(" + obs_dir + ")")
                    (rval, response) = self.fail_observation(beam, obs_dir, fail_dir, out_dir)
                    if rval:
                        self.log(-1, "failed to finalise observation: " + response)

                # The observation has finished, cleanup
                if all_finished:
                    self.log(1, observation + ": processing -> finished")
                    fin_parent_dir = self.finished_dir + "/" + beam + "/" + utc
                    if not os.path.exists(fin_parent_dir):
                        os.makedirs(fin_parent_dir, 0o755)
                    fin_dir = self.finished_dir + "/" + beam + "/" + utc + "/" + source
                    self.log(2, "main: finalise_observation(" + obs_dir + ")")
                    (rval, response) = self.finalise_observation(beam, obs_dir, fin_dir, out_dir)
                    if rval:
                        self.log(-1, "failed to finalise observation: " + response)
                    else:
                        # merge the headers from each sub-band, then remove
                        # the per-sub-band header/marker files and directories
                        header = Config.readCFGFileIntoDict(fin_dir + "/" + self.subbands[0]["cfreq"] + "/obs.header")
                        for i in range(1, len(self.subbands)):
                            header_sub = Config.readCFGFileIntoDict(fin_dir + "/" + self.subbands[i]["cfreq"] + "/obs.header")
                            # bug fix: was Config.mergerHeaderFreq (typo)
                            header = Config.mergeHeaderFreq(header, header_sub)
                            os.remove(fin_dir + "/" + self.subbands[i]["cfreq"] + "/obs.header")
                            os.remove(fin_dir + "/" + self.subbands[i]["cfreq"] + "/obs.finished")
                            os.removedirs(fin_dir + "/" + self.subbands[i]["cfreq"])
                        os.remove(fin_dir + "/" + self.subbands[0]["cfreq"] + "/obs.header")
                        os.remove(fin_dir + "/" + self.subbands[0]["cfreq"] + "/obs.finished")
                        os.removedirs(fin_dir + "/" + self.subbands[0]["cfreq"])
                        Config.writeDictToCFGFile(header, fin_dir + "/" + "obs.header")
                        shutil.copyfile(fin_dir + "/obs.header", out_dir + "/obs.header")

        if processed_this_loop == 0:
            self.log(3, "time.sleep(1)")
            time.sleep(1)
def main(self):
    """Run the UWB preprocessing pipeline on each observation.

    Waits for a header on the receiving data block, builds the
    uwb_preprocessing_pipeline command line from the header flags and
    runs it in a thread, looping until the quit event is set.
    """

    if not os.path.exists(self.proc_dir):
        os.makedirs(self.proc_dir, 0o755)

    # get the data block keys
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    num_stream = self.cfg["NUM_STREAM"]
    stream_id = str(self.id)

    self.debug("stream_id=" + str(self.id))

    # 3 data block identifiers: input, transients and output
    in_id = self.cfg["RECEIVING_DATA_BLOCK"]
    trans_id = self.cfg["TRANSIENTS_DATA_BLOCK"]
    out_id = self.cfg["PROCESSING_DATA_BLOCK"]

    # corresponding data block keys
    in_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, in_id)
    trans_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                    trans_id)
    out_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, out_id)

    log_host = self.cfg["SERVER_HOST"]
    log_port = int(self.cfg["SERVER_LOG_PORT"])

    self.debug("SMRBDaemon.waitForSMRB()")
    smrb_exists = SMRBDaemon.waitForSMRB(in_key, self)

    if not smrb_exists:
        # fixed: previously reported self.db_key, which this method never
        # sets (AttributeError risk) -- the key we waited on is in_key
        self.error("smrb[" + str(self.id) + "] no valid SMRB with " +
                   "key=" + in_key)
        self.quit_event.set()
        return

    # determine the number of channels to be processed by this stream
    (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + stream_id].split(":")

    # this command will not change from observation to observation
    preproc_cmd = "uwb_preprocessing_pipeline " + in_key + " " + \
                  trans_key + " " + out_key + " -d " + \
                  self.cfg["GPU_ID_" + stream_id]

    tag = "preproc" + stream_id

    # enter the main loop
    while (not self.quit_event.isSet()):

        # wait for the header to acquire the processing parameters
        cmd = "dada_header -k " + in_key + " -t " + tag
        self.debug(cmd)
        self.binary_list.append(cmd)
        rval, lines = self.system(cmd, 2, True)
        self.binary_list.remove(cmd)
        if rval != 0 or self.quit_event.isSet():
            return

        self.debug("parsing header")
        header = Config.parseHeader(lines)

        cmd = preproc_cmd

        utc_start = header["UTC_START"]
        source = header["SOURCE"]
        freq = header["FREQ"]

        # directory in which to run the preprocessor
        proc_dir = self.proc_dir + "/" + utc_start + "/" + source + \
            "/" + freq
        if not os.path.exists(proc_dir):
            os.makedirs(proc_dir, 0o755)

        # write the header to the proc_dir
        header_file = proc_dir + "/obs.header"
        self.debug("writing obs.header to out_dir")
        Config.writeDictToCFGFile(header, header_file)

        run_adaptive_filter = (header["ADAPTIVE_FILTER"] == "1")

        # presence of an RFI reference is based on NPOL == 3
        have_rfi_reference_pol = (int(header["NPOL"]) == 3)

        # presence of a calibration signal
        run_calibration = (header["CAL_SIGNAL"] == "1")

        # run the transients processor
        # run_transients = (header["TRANSIENTS"] == "1")
        run_transients = False

        # the RFI reference pol is assumed to be the last pol
        if have_rfi_reference_pol:
            rfi_reference_pol = int(header["NPOL"]) - 1
            self.info("Header NPOL=" + str(int(header["NPOL"])) +
                      " RFI reference signal present in pol " +
                      str(rfi_reference_pol))
            cmd = cmd + " -r " + str(rfi_reference_pol)

        if run_adaptive_filter:
            self.info("Adaptive filter active")
            cmd = cmd + " -a "

        if run_calibration:
            self.info("Calibration active")
            try:
                avg_time = header["TSYS_AVG_TIME"]
            except KeyError:
                avg_time = "10"
            try:
                freq_res = header["TSYS_FREQ_RES"]
            except KeyError:
                freq_res = "1"
            cmd = cmd + " -c " + avg_time + " -e " + freq_res

        if run_transients:
            self.info("Transients active")
            cmd = cmd + " -f " + header["TRANS_TSAMP"]

        # AJ todo check the channelisation limits with Nuer
        if run_adaptive_filter or run_calibration or run_transients:
            cmd = cmd + " -n 1024"

        # create a log pipe for the stats command
        log_pipe = LogSocket("preproc_src", "preproc_src", str(self.id),
                             "stream", log_host, log_port, int(DL))

        # connect up the log file output
        log_pipe.connect()

        # add this binary to the list of active commands
        binary_tag = "uwb_preprocessing_pipeline " + in_key
        self.binary_list.append(binary_tag)

        self.info("START " + cmd)

        # initialize the thread that runs the preprocessing pipeline
        preproc_thread = preprocThread(cmd, proc_dir, log_pipe.sock, 2)

        self.debug("starting preproc thread")
        preproc_thread.start()
        self.debug("preproc thread started")

        self.debug("joining preproc thread")
        rval = preproc_thread.join()
        self.debug("preproc thread joined")

        self.info("END " + cmd)

        # fixed: the binary was never removed from the active list and
        # the log pipe was never closed, leaking one socket per
        # observation
        self.binary_list.remove(binary_tag)
        log_pipe.close()

        if rval:
            self.error("preproc thread failed")
            # NOTE(review): the failure path also touches obs.finished --
            # presumably an obs.failed marker was intended; confirm the
            # downstream contract before changing this
            cmd = "touch " + proc_dir + "/obs.finished"
            rval, lines = self.system(cmd, 2)
            self.quit_event.set()
        else:
            cmd = "touch " + proc_dir + "/obs.finished"
            rval, lines = self.system(cmd, 2)
def main(self): db_id = self.cfg["PROCESSING_DATA_BLOCK"] db_prefix = self.cfg["DATA_BLOCK_PREFIX"] num_stream = self.cfg["NUM_STREAM"] self.db_key = SMRBDaemon.getDBKey(db_prefix, self.id, num_stream, db_id) self.log(0, "db_key=" + self.db_key) # wait up to 10s for the SMRB to be created smrb_wait = 10 cmd = "dada_dbmetric -k " + self.db_key self.binary_list.append(cmd) rval = 1 while rval and smrb_wait > 0 and not self.quit_event.isSet(): rval, lines = self.system(cmd) if rval: sleep(1) smrb_wait -= 1 if rval: self.log( -2, "smrb[" + str(self.id) + "] no valid SMRB with " + "key=" + self.db_key) self.quit_event.set() else: local_config = self.getConfiguration() self.cpu_core = self.cfg["STREAM_RECV_CORE_" + str(self.id)] self.ctrl_port = str( int(self.cfg["STREAM_CTRL_PORT"]) + int(self.id)) # write this config to file local_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg" self.log(1, "main: creating " + local_config_file) Config.writeDictToCFGFile(local_config, local_config_file) env = self.getEnvironment() cmd = self.getCommand(local_config_file) self.binary_list.append(cmd) self.log(3, "main: sleep(1)") sleep(1) self.log(3, "main: log_pipe = LogSocket(recvsim_src))") log_pipe = LogSocket("recvsim_src", "recvsim_src", str(self.id), "stream", self.cfg["SERVER_HOST"], self.cfg["SERVER_LOG_PORT"], int(DL)) self.log(3, "main: log_pipe.connect()") log_pipe.connect() self.log(3, "main: sleep(1)") sleep(1) # this should be a persistent / blocking command rval = self.system_piped(cmd, log_pipe.sock) if rval: self.log(-2, cmd + " failed with return value " + str(rval)) self.quit_event.set() log_pipe.close()
def main (self): db_id = self.cfg["RECEIVING_DATA_BLOCK"] db_prefix = self.cfg["DATA_BLOCK_PREFIX"] num_stream = self.cfg["NUM_STREAM"] self.db_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, db_id) # wait for the SMRB to exist before continuing self.log(2, "main: SMRBDaemon.waitForSMRB()") smrb_exists = SMRBDaemon.waitForSMRB(self.db_key, self) # don't proceed without an SMRB if not smrb_exists: self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " + "key=" + self.db_key) self.quit_event.set() return # configuration file for recvsim stream self.local_config = self.getConfiguration() self.local_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg" self.cpu_core = self.cfg["STREAM_RECV_CORE_" + str(self.id)] self.ctrl_port = str(int(self.cfg["STREAM_CTRL_PORT"]) + int(self.id)) self.configured = True self.running = False env = self.getEnvironment() # external control loop to allow for reconfiguration of RECV while not self.quit_event.isSet(): while not self.quit_event.isSet() and not self.configured: self.log(3, "main: waiting for configuration") sleep(1) if self.quit_event.isSet(): return Config.writeDictToCFGFile (self.local_config, self.local_config_file) self.log(3, "main: configured") cmd = self.getCommand(self.local_config_file) self.binary_list.append (cmd) self.log(3, "main: sleep(1)") sleep(1) self.log(3, "main: log_pipe = LogSocket(recvsim_src))") log_pipe = LogSocket ("recvsim_src", "recvsim_src", str(self.id), "stream", self.cfg["SERVER_HOST"], self.cfg["SERVER_LOG_PORT"], int(DL)) self.log(3, "main: log_pipe.connect()") log_pipe.connect() self.log(3, "main: sleep(1)") sleep(1) self.running = True self.log(1, "START " + cmd) # this should be a persistent / blocking command rval = self.system_piped (cmd, log_pipe.sock) self.running = False self.binary_list.remove (cmd) self.log(1, "END " + cmd) if rval: if self.quit_event.isSet(): self.log (-2, cmd + " failed with return value " + str(rval)) log_pipe.close ()
def main(self):
    """Run the UWB preprocessing pipeline on each observation.

    Waits for a header on the receiving data block, builds the
    uwb_preprocessing_pipeline command line from the header flags and
    runs it in a thread, looping until the quit event is set.
    """

    if not os.path.exists(self.proc_dir):
        os.makedirs(self.proc_dir, 0o755)

    # get the data block keys
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    num_stream = self.cfg["NUM_STREAM"]
    stream_id = str(self.id)

    self.debug("stream_id=" + str(self.id))

    # 3 data block identifiers: input, transients and output
    in_id = self.cfg["RECEIVING_DATA_BLOCK"]
    trans_id = self.cfg["TRANSIENTS_DATA_BLOCK"]
    out_id = self.cfg["PROCESSING_DATA_BLOCK"]

    # corresponding data block keys
    in_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, in_id)
    trans_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                    trans_id)
    out_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, out_id)

    log_host = self.cfg["SERVER_HOST"]
    log_port = int(self.cfg["SERVER_LOG_PORT"])

    self.debug("SMRBDaemon.waitForSMRB()")
    smrb_exists = SMRBDaemon.waitForSMRB(in_key, self)

    if not smrb_exists:
        # fixed: previously reported self.db_key, which this method never
        # sets (AttributeError risk) -- the key we waited on is in_key
        self.error("smrb[" + str(self.id) + "] no valid SMRB with " +
                   "key=" + in_key)
        self.quit_event.set()
        return

    # determine the number of channels to be processed by this stream
    (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + stream_id].split(":")

    # this command will not change from observation to observation
    preproc_cmd = "uwb_preprocessing_pipeline " + in_key + " " + \
                  trans_key + " " + out_key + " -d " + \
                  self.cfg["GPU_ID_" + stream_id]

    tag = "preproc" + stream_id

    # enter the main loop
    while (not self.quit_event.isSet()):

        # wait for the header to acquire the processing parameters
        cmd = "dada_header -k " + in_key + " -t " + tag
        self.debug(cmd)
        self.binary_list.append(cmd)
        rval, lines = self.system(cmd, 2, True)
        self.binary_list.remove(cmd)
        if rval != 0 or self.quit_event.isSet():
            return

        self.debug("parsing header")
        header = Config.parseHeader(lines)

        cmd = preproc_cmd

        utc_start = header["UTC_START"]
        source = header["SOURCE"]
        freq = header["FREQ"]

        # directory in which to run the preprocessor
        proc_dir = self.proc_dir + "/" + utc_start + "/" + source + \
            "/" + freq
        if not os.path.exists(proc_dir):
            os.makedirs(proc_dir, 0o755)

        # write the header to the proc_dir
        header_file = proc_dir + "/obs.header"
        self.debug("writing obs.header to out_dir")
        Config.writeDictToCFGFile(header, header_file)

        run_adaptive_filter = (header["ADAPTIVE_FILTER"] == "1")

        # presence of an RFI reference is based on NPOL == 3
        have_rfi_reference_pol = (int(header["NPOL"]) == 3)

        # presence of a calibration signal
        run_calibration = (header["CAL_SIGNAL"] == "1")

        # run the transients processor
        # run_transients = (header["TRANSIENTS"] == "1")
        run_transients = False

        # the RFI reference pol is assumed to be the last pol
        if have_rfi_reference_pol:
            rfi_reference_pol = int(header["NPOL"]) - 1
            self.info("Header NPOL=" + str(int(header["NPOL"])) +
                      " RFI reference signal present in pol " +
                      str(rfi_reference_pol))
            cmd = cmd + " -r " + str(rfi_reference_pol)

        if run_adaptive_filter:
            self.info("Adaptive filter active")
            cmd = cmd + " -a "

        if run_calibration:
            self.info("Calibration active")
            try:
                avg_time = header["TSYS_AVG_TIME"]
            except KeyError:
                avg_time = "10"
            try:
                freq_res = header["TSYS_FREQ_RES"]
            except KeyError:
                freq_res = "1"
            cmd = cmd + " -c " + avg_time + " -e " + freq_res

        if run_transients:
            self.info("Transients active")
            cmd = cmd + " -f " + header["TRANS_TSAMP"]

        # AJ todo check the channelisation limits with Nuer
        if run_adaptive_filter or run_calibration or run_transients:
            cmd = cmd + " -n 1024"

        # create a log pipe for the stats command
        log_pipe = LogSocket("preproc_src", "preproc_src", str(self.id),
                             "stream", log_host, log_port, int(DL))

        # connect up the log file output
        log_pipe.connect()

        # add this binary to the list of active commands
        binary_tag = "uwb_preprocessing_pipeline " + in_key
        self.binary_list.append(binary_tag)

        self.info("START " + cmd)

        # initialize the thread that runs the preprocessing pipeline
        preproc_thread = preprocThread(cmd, proc_dir, log_pipe.sock, 2)

        self.debug("starting preproc thread")
        preproc_thread.start()
        self.debug("preproc thread started")

        self.debug("joining preproc thread")
        rval = preproc_thread.join()
        self.debug("preproc thread joined")

        self.info("END " + cmd)

        # fixed: the binary was never removed from the active list and
        # the log pipe was never closed, leaking one socket per
        # observation
        self.binary_list.remove(binary_tag)
        log_pipe.close()

        if rval:
            self.error("preproc thread failed")
            # NOTE(review): the failure path also touches obs.finished --
            # presumably an obs.failed marker was intended; confirm the
            # downstream contract before changing this
            cmd = "touch " + proc_dir + "/obs.finished"
            rval, lines = self.system(cmd, 2)
            self.quit_event.set()
        else:
            cmd = "touch " + proc_dir + "/obs.finished"
            rval, lines = self.system(cmd, 2)
def main(self):
    """Fold (and optionally search/transient-detect) each observation.

    Waits for a header on the processing data block, validates it
    against the stream configuration, then runs dspsr (or dada_dbnull
    as a fall-through consumer) in a thread until the quit event is
    set.
    """

    stream_id = self.id

    # get the data block keys
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    db_id_in = self.cfg["PROCESSING_DATA_BLOCK"]
    db_id_out = self.cfg["SEND_DATA_BLOCK"]
    num_stream = self.cfg["NUM_STREAM"]
    cpu_core = self.cfg["STREAM_PROC_CORE_" + stream_id]

    db_key_in = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                    db_id_in)
    db_key_out = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                     db_id_out)

    self.log(0, "db_key_in=" + db_key_in + " db_key_out=" + db_key_out)

    # create the dspsr input file for the data block
    db_key_filename = "/tmp/spip_" + db_key_in + ".info"
    with open(db_key_filename, "w") as db_key_file:
        db_key_file.write("DADA INFO:\n")
        db_key_file.write("key " + db_key_in + "\n")

    gpu_id = self.cfg["GPU_ID_" + str(self.id)]

    # NOTE(review): prev_utc_start is never updated afterwards, so the
    # repeat guard below can never fire -- confirm intent before wiring
    # it up, as that changes which path consumes the data block
    prev_utc_start = ""

    (host, beam, subband) = self.cfg["STREAM_" + stream_id].split(":")
    (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + subband].split(":")

    # wait up to 10s for the SMRB to be created
    smrb_wait = 10
    cmd = "dada_dbmetric -k " + db_key_in
    self.binary_list.append(cmd)
    rval = 1
    while rval and smrb_wait > 0 and not self.quit_event.isSet():
        rval, lines = self.system(cmd)
        if rval:
            time.sleep(1)
        smrb_wait -= 1
    # fixed: the metric command was never removed from the active list
    self.binary_list.remove(cmd)

    if rval:
        self.log(-2, "smrb[" + str(self.id) + "] no valid SMRB with " +
                 "key=" + db_key_in)
        self.quit_event.set()
        return

    while (not self.quit_event.isSet()):

        cmd = "dada_header -k " + db_key_in
        self.log(0, cmd)
        self.binary_list.append(cmd)
        rval, lines = self.system(cmd)
        self.binary_list.remove(cmd)

        # if the command returned ok and we have a header
        if rval != 0:
            if self.quit_event.isSet():
                self.log(2, cmd + " failed, but quit_event true")
            else:
                self.log(-2, cmd + " failed")
                self.quit_event.set()
        elif len(lines) == 0:
            self.log(-2, "header was empty")
            self.quit_event.set()
        else:
            header = Config.parseHeader(lines)
            utc_start = header["UTC_START"]
            self.log(1, "UTC_START=" + header["UTC_START"])
            self.log(1, "RESOLUTION=" + header["RESOLUTION"])

            # default processing commands (flush the data blocks)
            fold_cmd = "dada_dbnull -s -k " + db_key_in
            trans_cmd = "dada_dbnull -s -k " + db_key_out
            search_cmd = "dada_dbnull -s -k " + db_key_in

            if prev_utc_start == utc_start:
                self.log(-2, "UTC_START [" + utc_start +
                         "] repeated, ignoring observation")
            else:
                beam = self.cfg["BEAM_" + str(self.beam_id)]

                # sanity check the header against the configuration
                if not float(bw) == float(header["BW"]):
                    self.log(-1, "configured bandwidth [" + bw +
                             "] != header[" + header["BW"] + "]")
                if not float(cfreq) == float(header["FREQ"]):
                    self.log(-1, "configured cfreq [" + cfreq +
                             "] != header[" + header["FREQ"] + "]")
                if not int(nchan) == int(header["NCHAN"]):
                    self.log(-2, "configured nchan [" + nchan +
                             "] != header[" + header["NCHAN"] + "]")

                source = header["SOURCE"]

                # output directories
                suffix = "/processing/" + beam + "/" + utc_start + \
                    "/" + source + "/" + cfreq
                fold_dir = self.cfg["CLIENT_FOLD_DIR"] + suffix
                trans_dir = self.cfg["CLIENT_TRANS_DIR"] + suffix
                search_dir = self.cfg["CLIENT_SEARCH_DIR"] + suffix

                fold = False
                search = False
                trans = False

                try:
                    fold = (header["PERFORM_FOLD"] == "1")
                    search = (header["PERFORM_SEARCH"] == "1")
                    trans = (header["PERFORM_TRANS"] == "1")
                except KeyError:
                    # default to folding when the header is silent
                    fold = True
                    search = False
                    trans = False

                if fold:
                    os.makedirs(fold_dir, 0o755)
                    # fixed: three earlier assignments to fold_cmd were
                    # dead code -- only this final one ever took effect
                    fold_cmd = "dspsr -Q " + db_key_filename + \
                        " -cuda " + gpu_id + \
                        " -minram 2048 -x 1024 -b 1024 -L 8 " + \
                        "-Lmin 7 -no_dyn"
                    header_file = fold_dir + "/obs.header"
                    Config.writeDictToCFGFile(header, header_file)

                if search or trans:
                    os.makedirs(search_dir, 0o755)
                    # fixed: the output name was "<utc> .fil" (stray
                    # space) which digifil would parse as two arguments
                    search_cmd = "digifil " + db_key_filename + \
                        " -c -B 10 -o " + utc_start + ".fil"
                    if trans:
                        search_cmd += " -k " + db_key_out

                # fixed: int(...) == "1" compared int to str and was
                # always False, so heimdall could never be selected
                if trans and int(self.cfg["NUM_SUBBAND"]) == 1:
                    os.makedirs(trans_dir, 0o755)
                    trans_cmd = "heimdall -k " + db_key_out + " -gpu_id 1"

                log_host = self.cfg["SERVER_HOST"]
                log_port = int(self.cfg["SERVER_LOG_PORT"])

                # setup the output log pipe for the fold thread
                fold_log_pipe = LogSocket("fold_src", "fold_src",
                                          str(self.id), "stream",
                                          log_host, log_port, int(DL))
                fold_log_pipe.connect()

                self.binary_list.append(fold_cmd)

                # create processing threads
                self.log(2, "creating processing threads")
                cmd = "numactl -C " + cpu_core + " -- " + fold_cmd
                fold_thread = procThread(cmd, fold_dir,
                                         fold_log_pipe.sock, 1)

                # start processing threads
                self.log(2, "starting processing threads")
                self.log(1, "START " + fold_cmd)
                fold_thread.start()

                # join processing threads
                self.log(2, "waiting for fold thread to terminate")
                rval = fold_thread.join()
                self.log(2, "fold thread joined")
                self.log(1, "END " + fold_cmd)

                # remove the binary command from the list
                self.binary_list.remove(fold_cmd)

                if rval:
                    self.log(-2, "fold thread failed")
                    # fixed: bare quit_event.set() raised NameError
                    self.quit_event.set()

                fold_log_pipe.close()

                self.log(1, "processing completed")