def run(self, acc_scale=True):
    logger = self.logger
    logger.info('Data reception on port %i.' % self.data_port)
    self.rx = rx = spead.TransportUDPrx(
        self.data_port, pkt_count=1024, buffer_size=51200000)
    self.running_event.set()
    try:
        ig = spead.ItemGroup()
        idx = -1
        dump_size = 0
        datasets = {}
        datasets_index = {}
        # we need these bits of meta data before being able to assemble and transmit
        # signal display data
        meta_required = ['n_chans', 'bandwidth', 'n_bls', 'n_xengs',
                         'center_freq', 'bls_ordering', 'n_accs']
        meta = {}
        for heap in spead.iterheaps(rx):
            idx += 1
            ig.update(heap)
            logger.debug('PROCESSING HEAP idx(%i) cnt(%i) @ %.4f' % (
                idx, heap.heap_cnt, time.time()))
            # output item values specified
            try:
                xeng_raw = ig.get_item('xeng_raw')
            except KeyError:
                xeng_raw = None
            if xeng_raw is None:
                logger.info('Skipping heap {} since no xeng_raw was found'.format(idx))
                continue
            if xeng_raw.has_changed():
                ig_copy = copy.deepcopy(ig)
                try:
                    self.data_queue.put_nowait(ig_copy)
                except Queue.Full:
                    logger.info('Data Queue full, disposing of heap {}.'.format(idx))
                try:
                    self.data_callback(ig_copy)
                except Exception:
                    logger.exception(
                        'Unhandled exception while calling data_callback()')
                xeng_raw.unset_changed()
            # should we quit?
            if self.quit_event.is_set():
                logger.info('Got a signal from main(), exiting rx loop...')
                break
    finally:
        try:
            rx.stop()
            logger.info("SPEAD receiver stopped")
        except Exception:
            logger.exception('Exception trying to stop self.rx')
        self.quit_event.clear()
        self.running_event.clear()
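# A hypothetical consumer for the queue fed by run() above: run() deep-copies the
# ItemGroup and hands it to data_queue.put_nowait() / data_callback() for every changed
# xeng_raw heap. This is only a sketch of how such a queue might be drained; the
# function name, the one-second timeout and the printed statistic are assumptions,
# not part of the original class.
import Queue  # Python 2 stdlib queue, matching the receiver code above


def consume(data_queue, quit_event):
    while not quit_event.is_set():
        try:
            ig = data_queue.get(timeout=1.0)  # block briefly so quit_event is polled
        except Queue.Empty:
            continue
        xeng_raw = ig['xeng_raw']
        if xeng_raw is not None:
            # the value is a numpy array of raw correlator data
            print 'got dump with shape', xeng_raw.shape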
def receive():
    while True:
        t = spead.TransportUDPrx(PORT, pkt_count=1024, buffer_size=5120000)
        ig = spead.ItemGroup()
        print "Initializing new item group and waiting for data..."
        s_time = 0
        total_bytes = 0
        for heap in spead.iterheaps(t):
            if s_time == 0:
                s_time = time.time()
            h_time = time.time()
            ig.update(heap)
            h_time = time.time() - h_time
            total_bytes += heap.heap_len
            print '\nDecoded heap %i of length %i in %f seconds (%f MBps).' % (
                heap.heap_cnt, heap.heap_len, h_time,
                heap.heap_len / (h_time * 1024 * 1024))
            if heap.heap_len == 0:
                continue
            print 'Items\n====='
            for name in ig.keys():
                item = ig.get_item(name)
                print 'Name:', name, ', Transport Type:', \
                    (item.dtype is not None and 'numpy' or 'std'), \
                    ', Shape:', item.shape
                if name == 'data_timestamp':
                    tt = time.time() - (ig[name][0] / 1000.0)
                    print 'Transport time for timestamp %i is %f (%f MBps)' % (
                        ig[name][0], tt, heap.heap_len / (tt * 1024 * 1024))
        s_time = time.time() - s_time
        print 'Received stop. Stream processed %i bytes in %f seconds (%f MBps).' % (
            total_bytes, s_time, total_bytes / (s_time * 1024 * 1024))
        t.stop()
        if options.profile:
            break
        time.sleep(2)
def test_iterheaps(self):
    rx_tport = S.TransportFile(self.filename, 'r')
    heaps = [f for f in S.iterheaps(rx_tport)]
    self.assertEqual(len(heaps), 2)
    heap = heaps[0]
    ig = S.ItemGroup()
    ig.update(heap)
    self.assertEqual(ig['var1'], 1)
    self.assertEqual(ig['var2'], 2)
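# test_iterheaps() above expects a file containing two heaps whose first heap carries
# var1 == 1 and var2 == 2. A minimal sketch of a transmit side that could produce such
# a fixture, using the same PySPEAD API seen elsewhere in this repo; the item IDs
# (0x1234/0x1235), the scalar shape/format and the final tx.end() call are assumptions,
# not taken from the test itself.
import spead


def make_fixture(filename):
    tx = spead.Transmitter(spead.TransportFile(filename, 'w'))
    ig = spead.ItemGroup()
    ig.add_item(name='var1', id=0x1234, description='first test variable',
                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)), init_val=1)
    ig.add_item(name='var2', id=0x1235, description='second test variable',
                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)), init_val=2)
    tx.send_heap(ig.get_heap())  # heap 1: descriptors plus initial values
    ig['var1'] = 1               # mark the items changed so they are re-sent
    ig['var2'] = 2
    tx.send_heap(ig.get_heap())  # heap 2
    tx.end()                     # terminate the stream so iterheaps() stops cleanly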
def receive():
    print 'RX: Initializing...'
    t = spead.TransportFile(sys.stdin)
    ig = spead.ItemGroup()
    for heap in spead.iterheaps(t):
        ig.update(heap)
        print 'Got heap:', ig.heap_cnt
        for name in ig.keys():
            print '   ', name
            item = ig.get_item(name)
            print '      Description: ', item.description
            print '      Format:      ', item.format
            print '      Shape:       ', item.shape
            print '      Value:       ', ig[name]
    print 'RX: Done.'
def receive():
    print 'RX: Initializing...'
    t = spead.TransportFile(FILENAME, 'r')
    ig = spead.ItemGroup()
    for heap in spead.iterheaps(t):
        ig.update(heap)
        print 'Got heap:', ig.heap_cnt
        for name in ig.keys():
            print '   ', name
            item = ig.get_item(name)
            print '      Description: ', item.description
            print '      Format:      ', [item.format]
            print '      Shape:       ', item.shape
            print '      Value:       ', ig[name]
    print 'RX: Done.'
def receive():
    timesdone = 0
    f = open('/tmp/woo', 'w')
    f.close()
    print 'RX: Initializing...'
    t = spead.TransportUDPrx(PORT)
    ig = spead.ItemGroup()
    last_timestamp = 0
    timestamp = 0
    for heap in spead.iterheaps(t):
        # print spead.readable_heap(heap)
        ig.update(heap)
        print 'Got heap cnt(%d):' % ig.heap_cnt
        for name in ig.keys():
            print '   ', name
            item = ig.get_item(name)
            print '      Description: ', item.description
            print '      Format:      ', item.format
            print '      Shape:       ', item.shape
            # print '      Value:       ', ig[name]
            if ig[name] is not None:
                if name == 'timestamp':
                    last_timestamp = timestamp
                    timestamp = ig[name]
                elif name == 'xeng_raw':
                    global heapdata
                    heapdata = []
                    heapdata[:] = ig[name]
                    f = open('/tmp/woo', 'a')
                    f.write('timestamp(%d) timediff(%d)\n' % (timestamp, timestamp - last_timestamp))
                    for data in heapdata:
                        for bls_ctr, data_ in enumerate(data):
                            fstr = 'acc_ctr(%010d) baseline(%02d) freq(%04d) ' % (
                                data_[1], data_[0] & 63, (data_[0] >> 6) & 4095)
                            f.write('%s\n' % fstr)
                            # print data
                            # data = int(data)
                            # timestep = data & 0xffffffff
                            # bls = (data >> 32) & 63
                            # freq = (data >> 38) & 4095
                            # f.write('%5i\t%3i\t%10i\n' % (freq, bls, timestep))
                    f.close()
                    timesdone += 1
                    if timesdone == 2:
                        del ig
                        del t
                        return
    print 'RX: Done.'
def receive():
    print 'RX: Initializing...'
    t = spead.TransportUDPrx(PORT)
    ig = spead.ItemGroup()
    for heap in spead.iterheaps(t):
        # print spead.readable_heap(heap)
        ig.update(heap)
        print 'Got heap:', ig.heap_cnt
        for name in ig.keys():
            print '   ', name
            item = ig.get_item(name)
            print '      Description: ', item.description
            print '      Format:      ', item.format
            print '      Shape:       ', item.shape
            print '      Value:       ', ig[name]
    print 'RX: Done.'
def receive():
    print 'RX: initializing'
    tport = spead.TransportUDPrx(PORT)
    ig = spead.ItemGroup()
    print 'RX: listening'
    pv_t1, pv_tx_time = 0, 0
    for heap in spead.iterheaps(tport):
        t1 = time.time()
        ig.update(heap)
        t2 = time.time()
        t_total = t2 - ig['tx_time']
        t_update = t2 - t1
        t_rx_heap = pv_t1 - ig['pv_time']
        print 't_total:', t_total
        print 't_update:', t_update
        print 't_rx_heap (prev):', t_rx_heap
        print 't_tx (prev):', ig['pv_time'] - pv_tx_time
        print '-' * 60
        pv_t1 = t1
        pv_tx_time = ig['tx_time']
    print 'RX: stop'
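# The timing receiver above reads 'tx_time' (stamped just before a heap is sent) and
# 'pv_time' (the previous heap's send time). A hypothetical matching sender is sketched
# below; the destination address, the number of heaps and the use of a 64-bit float
# format via mkfmt(('f', 64)) are assumptions, not taken from the receiver code.
def transmit(n_heaps=10):
    tx = spead.Transmitter(spead.TransportUDPtx('127.0.0.1', PORT))
    ig = spead.ItemGroup()
    ig.add_item(name='tx_time', description='send time of this heap',
                shape=[], fmt=spead.mkfmt(('f', 64)), init_val=0.0)
    ig.add_item(name='pv_time', description='send time of the previous heap',
                shape=[], fmt=spead.mkfmt(('f', 64)), init_val=0.0)
    pv_time = 0.0
    for _ in range(n_heaps):
        ig['pv_time'] = pv_time      # previous send time travels with this heap
        pv_time = time.time()
        ig['tx_time'] = pv_time      # stamp immediately before sending
        tx.send_heap(ig.get_heap())
    tx.end()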
def rx_cont(self, data_port=7148, acc_scale=True, filename=None, items=None, **kwargs):
    logger = self.logger
    logger.info('Data reception on port %i.' % data_port)
    rx = spead.TransportUDPrx(data_port, pkt_count=1024, buffer_size=51200000)
    # group = '239.2.0.100'
    # addrinfo = socket.getaddrinfo(group, None)[0]
    # group_bin = socket.inet_pton(addrinfo[0], addrinfo[4][0])
    # mreq = group_bin + struct.pack('=I', socket.INADDR_ANY)
    # s.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
    # print "Subscribing to %s." % group
    h5_file = None
    if filename is not None:
        logger.info('Starting file %s.' % filename)
        h5_file = h5py.File(filename, mode='w')
    ig = spead.ItemGroup()
    idx = 0
    dump_size = 0
    datasets = {}
    datasets_index = {}
    # we need these bits of meta data before being able to assemble and transmit signal display data
    meta_required = ['n_chans', 'bandwidth', 'n_bls', 'n_xengs',
                     'center_freq', 'bls_ordering', 'n_accs']
    meta = {}
    for heap in spead.iterheaps(rx):
        ig.update(heap)
        logger.debug('PROCESSING HEAP idx(%i) cnt(%i) @ %.4f' % (idx, heap.heap_cnt, time.time()))
        # output item values specified
        if items is not None:
            for name in ig.keys():
                if name in items:
                    item = ig.get_item(name)
                    if item.has_changed():
                        # decode flags
                        if name == 'flags_xeng_raw':
                            # application level debug flags
                            corrupt_flag = np.uint32(ig[name] / np.uint64(2**33)) & np.uint32(1)
                            logger.info('(%s) corrupt => %s' % (time.ctime(), 'true' if corrupt_flag == 1 else 'false'))
                            over_range_flag = np.uint32(ig[name] / np.uint64(2**32)) & np.uint32(1)
                            logger.info('(%s) over range => %s' % (time.ctime(), 'true' if over_range_flag == 1 else 'false'))
                            noise_diode_flag = np.uint32(ig[name] / np.uint64(2**31)) & np.uint32(1)
                            logger.info('(%s) noise diode => %s' % (time.ctime(), 'true' if noise_diode_flag == 1 else 'false'))
                            # debug flags not exposed externally
                            # digitiser flags
                            noise_diode0_flag = np.uint32(ig[name] / np.uint64(2**1)) & np.uint32(1)
                            logger.info('(%s) polarisation 0 noise diode => %s' % (time.ctime(), 'true' if noise_diode0_flag == 1 else 'false'))
                            noise_diode1_flag = np.uint32(ig[name] / np.uint64(2**0)) & np.uint32(1)
                            logger.info('(%s) polarisation 1 noise diode => %s' % (time.ctime(), 'true' if noise_diode1_flag == 1 else 'false'))
                            adc_or0_flag = np.uint32(ig[name] / np.uint64(2**3)) & np.uint32(1)
                            logger.info('(%s) polarisation 0 adc over-range => %s' % (time.ctime(), 'true' if adc_or0_flag == 1 else 'false'))
                            adc_or1_flag = np.uint32(ig[name] / np.uint64(2**2)) & np.uint32(1)
                            logger.info('(%s) polarisation 1 adc over-range => %s' % (time.ctime(), 'true' if adc_or1_flag == 1 else 'false'))
                            # f-engine flags
                            f_spead_error_flag = np.uint32(ig[name] / np.uint64(2**8)) & np.uint32(1)
                            logger.info('(%s) f-engine spead reception error => %s' % (time.ctime(), 'true' if f_spead_error_flag == 1 else 'false'))
                            f_fifo_of_flag = np.uint32(ig[name] / np.uint64(2**9)) & np.uint32(1)
                            logger.info('(%s) f-engine reception FIFO overflow => %s' % (time.ctime(), 'true' if f_fifo_of_flag == 1 else 'false'))
                            f_pkt_of_flag = np.uint32(ig[name] / np.uint64(2**10)) & np.uint32(1)
                            logger.info('(%s) f-engine reception packet overflow => %s' % (time.ctime(), 'true' if f_pkt_of_flag == 1 else 'false'))
                            f_discarding_flag = np.uint32(ig[name] / np.uint64(2**11)) & np.uint32(1)
                            logger.info('(%s) f-engine packet discarded => %s' % (time.ctime(), 'true' if f_discarding_flag == 1 else 'false'))
                            f_timed_out_flag = np.uint32(ig[name] / np.uint64(2**12)) & np.uint32(1)
                            logger.info('(%s) f-engine timed out waiting for valid timestamp => %s' % (time.ctime(), 'true' if f_timed_out_flag == 1 else 'false'))
                            f_rcv_error_flag = np.uint32(ig[name] / np.uint64(2**13)) & np.uint32(1)
                            logger.info('(%s) f-engine receive error => %s' % (time.ctime(), 'true' if f_rcv_error_flag == 1 else 'false'))
                            f_pfb_or1_flag = np.uint32(ig[name] / np.uint64(2**14)) & np.uint32(1)
                            logger.info('(%s) f-engine PFB 1 over-range => %s' % (time.ctime(), 'true' if f_pfb_or1_flag == 1 else 'false'))
                            f_pfb_or0_flag = np.uint32(ig[name] / np.uint64(2**15)) & np.uint32(1)
                            logger.info('(%s) f-engine PFB 0 over-range => %s' % (time.ctime(), 'true' if f_pfb_or0_flag == 1 else 'false'))
                            f_qdr1_flag = np.uint32(ig[name] / np.uint64(2**16)) & np.uint32(1)
                            logger.info('(%s) f-engine QDR SRAM 1 parity error => %s' % (time.ctime(), 'true' if f_qdr1_flag == 1 else 'false'))
                            f_qdr0_flag = np.uint32(ig[name] / np.uint64(2**17)) & np.uint32(1)
                            logger.info('(%s) f-engine QDR SRAM 0 parity error => %s' % (time.ctime(), 'true' if f_qdr0_flag == 1 else 'false'))
                            # x-engine flags
                            x_spead_error_flag = np.uint32(ig[name] / np.uint64(2**24)) & np.uint32(1)
                            logger.info('(%s) x-engine spead reception error => %s' % (time.ctime(), 'true' if x_spead_error_flag == 1 else 'false'))
                        # convert timestamp
                        elif name == 'timestamp':
                            sd_timestamp = ig['sync_time'] + (ig['timestamp'] / float(ig['scale_factor_timestamp']))
                            logger.info('(%s) timestamp => %s' % (time.ctime(), time.ctime(sd_timestamp)))
                        # generic output of item converted to string
                        else:
                            logger.info('(%s) %s => %s' % (time.ctime(), name, str(ig[name])))
        if h5_file is not None:
            for name in ig.keys():
                logger.debug('\tkey name %s' % name)
                item = ig.get_item(name)
                if (not item.has_changed()) and (name in datasets.keys()):
                    # the item is not marked as changed, and we have a record for it, skip ahead
                    continue
                if name in meta_required:
                    meta[name] = ig[name]
                    meta_required.pop(meta_required.index(name))
                    if len(meta_required) == 0:
                        logger.info('Got all required metadata. Expecting data frame shape of %i %i %i' % (meta['n_chans'], meta['n_bls'], 2))
                        meta_required = ['n_chans', 'bandwidth', 'n_bls', 'n_xengs',
                                         'center_freq', 'bls_ordering', 'n_accs']
                # check to see if we have encountered this type before
                if name not in datasets.keys():
                    datasets[name] = ig[name]
                    datasets_index[name] = 0
                # check to see if we have stored this type before
                if name not in h5_file.keys():
                    shape = ig[name].shape if item.shape == -1 else item.shape
                    dtype = np.dtype(type(ig[name])) if shape == [] else item.dtype
                    if dtype is None:
                        # if we can't get a dtype from the descriptor try and get one from the value
                        dtype = ig[name].dtype
                    if dtype != 'object':
                        logger.info('Creating dataset for %s (%s,%s).' % (str(name), str(shape), str(dtype)))
                        if h5_file is not None:
                            h5_file.create_dataset(
                                name, [1] + ([] if list(shape) == [1] else list(shape)),
                                maxshape=[None] + ([] if list(shape) == [1] else list(shape)),
                                dtype=dtype)
                    if not item.has_changed():
                        # if we built from an empty descriptor
                        continue
                else:
                    logger.info('Adding %s to dataset. New size is %i.' % (name, datasets_index[name] + 1))
                    if h5_file is not None:
                        h5_file[name].resize(datasets_index[name] + 1, axis=0)
                if name.startswith('xeng_raw'):
                    sd_timestamp = ig['sync_time'] + (ig['timestamp'] / float(ig['scale_factor_timestamp']))
                    logger.info("SD Timestamp: %f (%s)." % (sd_timestamp, time.ctime(sd_timestamp)))
                    scale_factor = float(meta['n_accs'] if ('n_accs' in meta.keys() and acc_scale) else 1)
                    scaled_data = (ig[name] / scale_factor).astype(np.float32)
                    logger.info("Sending signal display frame with timestamp %i (%s). %s. Max: %i, Mean: %i" % (
                        sd_timestamp, time.ctime(sd_timestamp),
                        "Unscaled" if not acc_scale else "Scaled by %i" % scale_factor,
                        np.max(scaled_data), np.mean(scaled_data)))
                if h5_file is not None:
                    h5_file[name][datasets_index[name]] = ig[name]
                    datasets_index[name] += 1
                # we have dealt with this item so continue...
                item.unset_changed()
            idx += 1
        # /if h5_file is not None
        if len(plot_baselines) > 0:
            if 'xeng_raw' in ig.keys():
                if ig['xeng_raw'] is not None:
                    # print np.shape(ig['xeng_raw'])
                    baseline_data = []
                    for baseline in range(0, 40):
                        # print 'baseline %i:' % baseline, ig['xeng_raw'][:, baseline]
                        FREQ_TO_PLOT = 0
                        print 'f_%i bls_%i:' % (FREQ_TO_PLOT, baseline), ig['xeng_raw'][FREQ_TO_PLOT, baseline]
                        # if baseline in [39, 9, 21, 33]:
                        if baseline in plot_baselines:
                            bdata = ig['xeng_raw'][:, baseline]
                            powerdata = []
                            for complex_tuple in bdata:
                                pwr = np.sqrt(complex_tuple[0]**2 + complex_tuple[1]**2)
                                powerdata.append(pwr)
                            baseline_data.append((baseline, powerdata[plot_startchan:plot_endchan]))
                            # break
                    if not got_data_event.is_set():
                        plotqueue.put(baseline_data)
                        got_data_event.set()
        # /if plotbaseline is not None:
        # if len(items) > 0
        # should we quit?
        if self.quit_event.is_set():
            logger.info('Got a signal from main(), exiting rx loop...')
            break
    # for (name,idx) in datasets_index.iteritems():
    #     if idx == 1:
    #         self.logger.info("Repacking dataset %s as an attribute as it is singular."%name)
    #         h5_file['/'].attrs[name] = h5_file[name].value[0]
    #         h5_file.__delitem__(name)
    if h5_file is not None:
        logger.info("Got a SPEAD end-of-stream marker. Closing File.")
        h5_file.flush()
        h5_file.close()
    rx.stop()
    logger.info("Files and sockets closed.")
    self.quit_event.clear()
def rx_cont(self, data_port=7148, sd_ip='127.0.0.1', sd_port=7149, acc_scale=True, filename=None, **kwargs):
    logger = self.logger
    logger.info("Data reception on port %i." % data_port)
    rx = spead.TransportUDPrx(data_port, pkt_count=1024, buffer_size=51200000)
    logger.info("Sending Signal Display data to %s:%i." % (sd_ip, sd_port))
    tx_sd = spead.Transmitter(spead.TransportUDPtx(sd_ip, sd_port))
    ig = spead.ItemGroup()
    ig_sd = spead.ItemGroup()
    if filename == None:
        filename = str(int(time.time())) + ".synth.h5"
    logger.info("Starting file %s." % (filename))
    f = h5py.File(filename, mode="w")
    data_ds = None
    ts_ds = None
    idx = 0
    dump_size = 0
    datasets = {}
    datasets_index = {}
    # we need these bits of meta data before being able to assemble and transmit signal display data
    meta_required = ['n_chans', 'bandwidth', 'n_bls', 'n_xengs', 'center_freq', 'bls_ordering']
    meta_desired = ['n_accs']
    meta = {}
    for heap in spead.iterheaps(rx):
        ig.update(heap)
        logger.debug("PROCESSING HEAP idx(%i) cnt(%i) @ %.4f" % (idx, heap.heap_cnt, time.time()))
        for name in ig.keys():
            item = ig.get_item(name)
            if not item._changed and datasets.has_key(name):
                # the item is not marked as changed, and we have a record for it
                continue
            if name in meta_desired:
                meta[name] = ig[name]
            if name in meta_required:
                meta[name] = ig[name]
                meta_required.pop(meta_required.index(name))
                if len(meta_required) == 0:
                    # sd_frame = np.zeros((meta['n_chans'],meta['n_bls'],2),dtype=np.float32)
                    logger.info("Got all required metadata. Expecting data frame shape of %i %i %i" % (meta['n_chans'], meta['n_bls'], 2))
                    meta_required = ['n_chans', 'bandwidth', 'n_bls', 'n_xengs', 'center_freq', 'bls_ordering']
                    ig_sd = spead.ItemGroup()
                    for meta_item in meta_required:
                        ig_sd.add_item(
                            name=ig.get_item(meta_item).name,
                            id=ig.get_item(meta_item).id,
                            description=ig.get_item(meta_item).description,
                            # shape=ig.get_item(meta_item).shape,
                            # fmt=ig.get_item(meta_item).format,
                            init_val=ig.get_item(meta_item).get_value())
                    tx_sd.send_heap(ig_sd.get_heap())
            if not datasets.has_key(name):
                # check to see if we have encountered this type before
                shape = ig[name].shape if item.shape == -1 else item.shape
                dtype = np.dtype(type(ig[name])) if shape == [] else item.dtype
                if dtype is None:
                    # if we can't get a dtype from the descriptor try and get one from the value
                    dtype = ig[name].dtype
                logger.info("Creating dataset for %s (%s,%s)." % (str(name), str(shape), str(dtype)))
                f.create_dataset(name, [1] + ([] if list(shape) == [1] else list(shape)),
                                 maxshape=[None] + ([] if list(shape) == [1] else list(shape)),
                                 dtype=dtype)
                dump_size += np.multiply.reduce(shape) * dtype.itemsize
                datasets[name] = f[name]
                datasets_index[name] = 0
                if not item._changed:
                    continue  # if we built from an empty descriptor
            else:
                logger.info("Adding %s to dataset. New size is %i." % (name, datasets_index[name] + 1))
                f[name].resize(datasets_index[name] + 1, axis=0)
            if name.startswith("xeng_raw"):
                sd_timestamp = ig['sync_time'] + (ig['timestamp'] / float(ig['scale_factor_timestamp']))
                # logger.info("SD Timestamp: %f (%s)."%(sd_timestamp,time.ctime(sd_timestamp)))
                scale_factor = float(meta['n_accs'] if (meta.has_key('n_accs') and acc_scale) else 1)
                scaled_data = (ig[name] / scale_factor).astype(np.float32)
                # reinit the group to force meta data resend
                ig_sd = spead.ItemGroup()
                ig_sd.add_item(name=('sd_data'), id=(0x3501),
                               description="Combined raw data from all x engines.",
                               ndarray=(scaled_data.dtype, scaled_data.shape))
                ig_sd.add_item(name=('sd_timestamp'), id=0x3502,
                               description='Timestamp of this sd frame in centiseconds since epoch (40 bit limitation).',
                               init_val=sd_timestamp)
                               # shape=[],
                               # fmt=spead.mkfmt(('u',spead.ADDRSIZE)))
                t_it = ig_sd.get_item('sd_data')
                logger.debug("Added SD frame with shape %s, dtype %s" % (str(t_it.shape), str(t_it.dtype)))
                tx_sd.send_heap(ig_sd.get_heap())
                logger.info("Sending signal display frame with timestamp %i (%s). %s. Max: %i, Mean: %i" % (
                    sd_timestamp, time.ctime(sd_timestamp),
                    "Unscaled" if not acc_scale else "Scaled by %i" % (scale_factor),
                    np.max(scaled_data), np.mean(scaled_data)))
                ig_sd['sd_data'] = scaled_data
                ig_sd['sd_timestamp'] = sd_timestamp * 100
                # ig_sd['sd_timestamp'] = sd_timestamp
                tx_sd.send_heap(ig_sd.get_heap())
            f[name][datasets_index[name]] = ig[name]
            datasets_index[name] += 1
            item._changed = False  # we have dealt with this item so continue...
        idx += 1
    # for (name,idx) in datasets_index.iteritems():
    #     if idx == 1:
    #         self.logger.info("Repacking dataset %s as an attribute as it is singular."%name)
    #         f['/'].attrs[name] = f[name].value[0]
    #         f.__delitem__(name)
    logger.info("Got a SPEAD end-of-stream marker. Closing File.")
    f.flush()
    f.close()
    rx.stop()
    ig_sd = None
    sd_timestamp = None
    logger.info("Files and sockets closed.")
def rx_inter(self, data_port=7148, sd_ip='127.0.0.1', sd_port=7149, acc_scale=True, filename=None, **kwargs):
    '''Process SPEAD data from X engines and forward it to the SD.'''
    print 'WARNING: This function is not yet tested. YMMV.'
    logger = self.logger
    logger.info("Data reception on port %i." % data_port)
    rx = spead.TransportUDPrx(data_port, pkt_count=1024, buffer_size=51200000)
    logger.info("Sending Signal Display data to %s:%i." % (sd_ip, sd_port))
    tx_sd = spead.Transmitter(spead.TransportUDPtx(sd_ip, sd_port))
    ig = spead.ItemGroup()
    ig_sd = spead.ItemGroup()
    if filename == None:
        filename = str(int(time.time())) + ".synth.h5"
    logger.info("Starting file %s." % (filename))
    f = h5py.File(filename, mode="w")
    data_ds = None
    ts_ds = None
    idx = 0
    dump_size = 0
    datasets = {}
    datasets_index = {}
    # we need these bits of meta data before being able to assemble and transmit signal display data
    meta_required = ['n_chans', 'n_bls', 'n_xengs', 'center_freq', 'bls_ordering', 'bandwidth']
    meta_desired = ['n_accs']
    meta = {}
    sd_frame = None
    sd_slots = None
    timestamp = None
    # log the latest timestamp for which we've stored data
    currentTimestamp = -1
    # iterate through SPEAD heaps returned by the SPEAD receiver.
    for heap in spead.iterheaps(rx):
        ig.update(heap)
        logger.debug("PROCESSING HEAP idx(%i) cnt(%i) @ %.4f" % (idx, heap.heap_cnt, time.time()))
        for name in ig.keys():
            item = ig.get_item(name)
            # the item is not marked as changed and we already have a record for it, continue
            if not item._changed and datasets.has_key(name):
                continue
            logger.debug("PROCESSING KEY %s @ %.4f" % (name, time.time()))
            if name in meta_desired:
                meta[name] = ig[name]
            if name in meta_required:
                meta[name] = ig[name]
                meta_required.pop(meta_required.index(name))
                if len(meta_required) == 0:
                    sd_frame = np.zeros((meta['n_chans'], meta['n_bls'], 2), dtype=np.float32)
                    logger.info("Got all required metadata. Initialised sd frame to shape %s" % (str(sd_frame.shape)))
                    meta_required = ['n_chans', 'bandwidth', 'n_bls', 'n_xengs', 'center_freq', 'bls_ordering']
                    ig_sd = spead.ItemGroup()
                    for meta_item in meta_required:
                        ig_sd.add_item(
                            name=ig.get_item(meta_item).name,
                            id=ig.get_item(meta_item).id,
                            description=ig.get_item(meta_item).description,
                            # shape=ig.get_item(meta_item).shape,
                            # fmt=ig.get_item(meta_item).format,
                            init_val=ig.get_item(meta_item).get_value())
                    tx_sd.send_heap(ig_sd.get_heap())
                    sd_slots = np.zeros(meta['n_xengs'])
            if not datasets.has_key(name):
                # check to see if we have encountered this type before
                shape = ig[name].shape if item.shape == -1 else item.shape
                dtype = np.dtype(type(ig[name])) if shape == [] else item.dtype
                if dtype is None:
                    # if we can't get a dtype from the descriptor, try and get one from the value
                    dtype = ig[name].dtype
                logger.info("Creating dataset for %s (%s,%s)." % (str(name), str(shape), str(dtype)))
                f.create_dataset(name, [1] + ([] if list(shape) == [1] else list(shape)),
                                 maxshape=[None] + ([] if list(shape) == [1] else list(shape)),
                                 dtype=dtype)
                dump_size += np.multiply.reduce(shape) * dtype.itemsize
                datasets[name] = f[name]
                datasets_index[name] = 0
                # if we built from an empty descriptor
                if not item._changed:
                    continue
            else:
                logger.info("Adding %s to dataset. New size is %i." % (name, datasets_index[name] + 1))
                f[name].resize(datasets_index[name] + 1, axis=0)
            # now we store this x engine's data for sending sd data.
            if sd_frame is not None and name.startswith("xeng_raw"):
                xeng_id = int(name[8:])
                sd_frame[xeng_id::meta['n_xengs']] = ig[name]
                logger.debug('Received data for Xeng %i @ %.4f' % (xeng_id, time.time()))
            # we got a timestamp.
            if sd_frame is not None and name.startswith("timestamp"):
                xeng_id = int(name[9:])
                timestamp = ig['sync_time'] + (ig[name] / ig['scale_factor_timestamp'])  # in seconds since unix epoch
                localTime = time.time()
                print "Decoded timestamp for Xeng", xeng_id, ":", timestamp, " (", time.ctime(timestamp), ") @ %.4f" % localTime, " ", time.ctime(localTime), "diff(", localTime - timestamp, ")"
                # is this timestamp in the past?
                if currentTimestamp > timestamp:
                    errorString = "Timestamp %.2f (%s) is earlier than the current timestamp %.2f (%s). Ignoring..." % (
                        timestamp, time.ctime(timestamp), currentTimestamp, time.ctime(currentTimestamp))
                    logger.warning(errorString)
                    continue
                # is this a new timestamp before a complete set?
                if (timestamp > currentTimestamp) and sd_slots.any():
                    errorString = "New timestamp %.2f from Xeng%i before previous set %.2f sent" % (timestamp, xeng_id, currentTimestamp)
                    logger.warning(errorString)
                    sd_slots = np.zeros(meta['n_xengs'])
                    sd_frame = np.zeros((ig['n_chans'], ig['n_bls'], 2), dtype=sd_frame.dtype)
                    currentTimestamp = -1
                    continue
                # is this new timestamp in the past for this X engine?
                if timestamp <= sd_slots[xeng_id]:
                    errorString = 'Xeng%i already on timestamp %.2f but got %.2f now, THIS SHOULD NOT HAPPEN' % (xeng_id, sd_slots[xeng_id], timestamp)
                    logger.error(errorString)
                    raise RuntimeError(errorString)
                # update our info on which integrations we have
                sd_slots[xeng_id] = timestamp
                currentTimestamp = timestamp
            # do we have integration data and timestamps for all the xengines? If so, send the SD frame.
            if timestamp is not None and sd_frame is not None and sd_slots is not None and sd_slots.all():
                ig_sd = spead.ItemGroup()
                # make sure we have the right dtype for the sd data
                ig_sd.add_item(name=('sd_data'), id=(0x3501),
                               description="Combined raw data from all x engines.",
                               ndarray=(sd_frame.dtype, sd_frame.shape))
                ig_sd.add_item(name=('sd_timestamp'), id=0x3502,
                               description='Timestamp of this sd frame in centiseconds since epoch (40 bit limitation).',
                               shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)))
                t_it = ig_sd.get_item('sd_data')
                logger.info("Added SD frame with shape %s, dtype %s" % (str(t_it.shape), str(t_it.dtype)))
                scale_factor = (meta['n_accs'] if meta.has_key('n_accs') else 1)
                logger.info("Sending signal display frame with timestamp %i (%s). %s. @ %.4f" % (
                    timestamp, time.ctime(timestamp),
                    "Unscaled" if not acc_scale else "Scaled by %i" % (scale_factor),
                    time.time()))
                ig_sd['sd_data'] = sd_frame.astype(np.float32) if not acc_scale else (sd_frame / float(scale_factor)).astype(np.float32)
                ig_sd['sd_timestamp'] = int(timestamp * 100)
                tx_sd.send_heap(ig_sd.get_heap())
                # reset the arrays that hold integration data
                sd_slots = np.zeros(meta['n_xengs'])
                sd_frame = np.zeros((ig['n_chans'], ig['n_bls'], 2), dtype=sd_frame.dtype)
                timestamp = None
            f[name][datasets_index[name]] = ig[name]
            datasets_index[name] += 1
            item._changed = False
        idx += 1
    logger.info("Got a SPEAD end-of-stream marker. Closing File.")
    f.flush()
    f.close()
    rx.stop()
    sd_frame = None
    sd_slots = None
    ig_sd = None
def test_heaplen(self):
    data = open(self.filename).read()
    rx_tport = S.TransportString(data)
    for _ in S.iterheaps(rx_tport):
        self.assertFalse(rx_tport.got_term_sig)
    self.assertTrue(rx_tport.got_term_sig)