def transmit_std(): print 'TX: Initializing standard transport TX to IP', options.ip tx = spead.Transmitter(spead.TransportUDPtx(options.ip, 8888)) ig = spead.ItemGroup() ig.add_item( name='data_timestamp', description='Timestamp in epoch ms for the current visibility sample', shape=[1], fmt=spead.mkfmt(('u', 64))) ig.add_item( name='vis_data', description='The complex visibility spectrum for a single time dump', shape=[channels, baselines, 2], fmt=spead.mkfmt(('u', 32))) # using init_val with a numpy array will use the numpy transport automatically. for x in range(iterations): ig['data_timestamp'] = int(time.time() * 1000) ig['vis_data'] = np.random.normal(size=(channels, baselines, 2)).astype(np.float32) t_heap_send = time.time() tx.send_heap(ig.get_heap()) print "Sent data for timestamp", ig['data_timestamp'], "in", time.time( ) - t_heap_send, "s" time.sleep(15) tx.end() print 'TX: Done.'
def transmit():
    #print 'TX: Initializing...'
    # Stream SPEAD heaps to stdout via the file transport.
    file_tx = spead.Transmitter(spead.TransportFile(sys.stdout))
    group = spead.ItemGroup()
    # Scalar item packing three unsigned 32-bit fields.
    group.add_item(name='Var1',
                   description='Description for Var1',
                   shape=[],
                   fmt=spead.mkfmt(('u', 32), ('u', 32), ('u', 32)),
                   init_val=(1, 2, 3))
    file_tx.send_heap(group.get_heap())
    group['Var1'] = (4, 5, 6)
    file_tx.send_heap(group.get_heap())
    # 100x100 grid of unsigned 32-bit values.
    group.add_item(name='Var2',
                   description='Description for Var2',
                   shape=[100, 100],
                   fmt=spead.mkfmt(('u', 32)))
    grid = numpy.arange(100 * 100).reshape(100, 100)
    group['Var2'] = grid
    file_tx.send_heap(group.get_heap())
    file_tx.end()
def transmit(): print 'TX: Initializing...' tx = spead.Transmitter(spead.TransportUDPtx('127.0.0.1', PORT, rate=1e9)) ig = spead.ItemGroup() ig.add_item(name='Var1', description='Description for Var1', shape=[], fmt=spead.mkfmt(('u', 32), ('u', 32), ('u', 32)), init_val=(1, 2, 3)) tx.send_heap(ig.get_heap()) ig['Var1'] = (4, 5, 6) tx.send_heap(ig.get_heap()) data = numpy.arange(100000 * 4000).astype(numpy.uint32) data.shape = (100000, 4000) ig.add_item(name='Var2', description='Description for Var2', shape=[100000, 4000], ndarray=data) ig['Var2'] = data tx.send_heap(ig.get_heap()) tx.end() print 'TX: Done.'
def transmit(): print 'TX: Initializing...' tx = spead.Transmitter(spead.TransportFile(FILENAME,'w')) ig = spead.ItemGroup() ig.add_item(name='Var1', description='Description for Var1', shape=[], fmt=spead.mkfmt(('u',32),('u',32),('u',32)), init_val=(1,2,3)) tx.send_heap(ig.get_heap()) ig['Var1'] = (4,5,6) tx.send_heap(ig.get_heap()) ig.add_item(name='Var2', description='Description for Var2', shape=[DIM,DIM], fmt=spead.mkfmt(('u',32))) data = numpy.arange(DIM*DIM); data.shape = (DIM,DIM) ig['Var2'] = data tx.send_heap(ig.get_heap()) tx.end() print 'TX: Done.'
def transmit_std(): print 'TX: Initializing standard transport TX to IP', options.ip tx = spead.Transmitter(spead.TransportUDPtx(options.ip, 8888)) ig = spead.ItemGroup() ig.add_item(name='data_timestamp', description='Timestamp in epoch ms for the current visibility sample', shape=[1], fmt=spead.mkfmt(('u', 64))) ig.add_item(name='vis_data', description='The complex visibility spectrum for a single time dump', shape=[channels, baselines, 2], fmt=spead.mkfmt(('u', 32))) # using init_val with a numpy array will use the numpy transport automatically. for x in range(iterations): ig['data_timestamp'] = int(time.time() * 1000) ig['vis_data'] = np.random.normal(size=(channels, baselines, 2)).astype(np.float32) t_heap_send = time.time() tx.send_heap(ig.get_heap()) print "Sent data for timestamp", ig['data_timestamp'], "in", time.time() - t_heap_send, "s" time.sleep(15) tx.end() print 'TX: Done.'
def send_meta(): print 'Sending meta' spead_ig = spead.ItemGroup() spead_tx = spead.Transmitter(spead.TransportUDPtx('127.0.0.1', DATA_PORT)) # ndarray = numpy.dtype(numpy.int64), (4096 * 40 * 1, 1, 1) ndarray = numpy.dtype(numpy.int32), (4096, 40, 2) spead_ig.add_item(name='xeng_raw', id=0x1800, description='raw simulator data', ndarray=ndarray) spead_ig.add_item(name='timestamp', id=0x1600, description='Timestamp', shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)), init_val=0) spead_tx.send_heap(spead_ig.get_heap())
def transmit():
    #print 'TX: Initializing...'
    # Heaps are serialised straight onto stdout through the file transport.
    out = spead.Transmitter(spead.TransportFile(sys.stdout))
    items = spead.ItemGroup()
    items.add_item(name='Var1',
                   description='Description for Var1',
                   shape=[],
                   fmt=spead.mkfmt(('u', 32), ('u', 32), ('u', 32)),
                   init_val=(1, 2, 3))
    out.send_heap(items.get_heap())
    # Update the scalar and re-send.
    items['Var1'] = (4, 5, 6)
    out.send_heap(items.get_heap())
    items.add_item(name='Var2',
                   description='Description for Var2',
                   shape=[100, 100],
                   fmt=spead.mkfmt(('u', 32)))
    matrix = numpy.arange(100 * 100)
    matrix = matrix.reshape(100, 100)
    items['Var2'] = matrix
    out.send_heap(items.get_heap())
    out.end()
def transmit(): print 'TX: Initializing...' tx = spead.Transmitter(spead.TransportFile(FILENAME, 'w')) ig = spead.ItemGroup() ig.add_item(name='Var1', description='Description for Var1', shape=[], fmt=spead.mkfmt(('u', 32), ('u', 32), ('u', 32)), init_val=(1, 2, 3)) tx.send_heap(ig.get_heap()) ig['Var1'] = (4, 5, 6) tx.send_heap(ig.get_heap()) ig.add_item(name='Var2', description='Description for Var2', shape=[DIM, DIM], fmt=spead.mkfmt(('u', 32))) data = numpy.arange(DIM * DIM) data.shape = (DIM, DIM) ig['Var2'] = data tx.send_heap(ig.get_heap()) tx.end() print 'TX: Done.'
def transmit(): print 'TX: Initializing...' tx = spead.Transmitter(spead.TransportUDPtx('127.0.0.1', PORT, rate=1e9)) ig = spead.ItemGroup() ig.add_item(name='Var1', description='Description for Var1', shape=[], fmt=spead.mkfmt(('u',32),('u',32),('u',32)), init_val=(1,2,3)) tx.send_heap(ig.get_heap()) ig['Var1'] = (4,5,6) tx.send_heap(ig.get_heap()) data = numpy.arange(100000*4000).astype(numpy.uint32); data.shape = (100000,4000) ig.add_item(name='Var2', description='Description for Var2', shape=[100000,4000], ndarray=data) ig['Var2'] = data tx.send_heap(ig.get_heap()) tx.end() print 'TX: Done.'
tx = spead.Transmitter(spead.TransportUDPtx('127.0.0.1', 8888, rate=1e3)) # Send N heaps for j in range(int(sys.argv[1])): ig = spead.ItemGroup() # Timestamp time = str(datetime.datetime.now().time()) ig.add_item(name='Timestamp', description='Current time', shape=[], fmt=spead.mkfmt(('c', 8)), init_val=('00:00')) ig['Timestamp'] = time # Beam ID ig.add_item(name='beam id', description='beam number', shape=[], fmt=spead.mkfmt(('u', 32)), init_val=(0)) ig['beam id'] = i # frequency ig.add_item(
def rx_inter(self,data_port=7148, sd_ip='127.0.0.1', sd_port=7149,
             acc_scale=True, filename=None, **kwargs):
    '''
    Process SPEAD data from X engines and forward it to the SD.

    Receives heaps on `data_port`, archives every changed item into an HDF5
    file (`filename`, or "<epoch>.synth.h5" if None), assembles per-X-engine
    "xeng_raw*" slices into a combined signal-display frame, and retransmits
    that frame to `sd_ip`:`sd_port` once data for all X engines at a common
    timestamp has arrived.  `acc_scale` divides the SD frame by n_accs before
    sending.  Blocks until the SPEAD stream ends.
    '''
    print 'WARNING: This function is not yet tested. YMMV.'
    logger=self.logger
    logger.info("Data reception on port %i."%data_port)
    rx = spead.TransportUDPrx(data_port, pkt_count=1024, buffer_size=51200000)
    logger.info("Sending Signal Display data to %s:%i."%(sd_ip,sd_port))
    tx_sd = spead.Transmitter(spead.TransportUDPtx(sd_ip, sd_port))
    ig = spead.ItemGroup()
    ig_sd = spead.ItemGroup()
    if filename == None:
        filename=str(int(time.time())) + ".synth.h5"
    logger.info("Starting file %s."%(filename))
    f = h5py.File(filename, mode="w")
    data_ds = None
    ts_ds = None
    idx = 0          # number of heaps processed so far
    dump_size = 0    # running total of bytes per dump, for all datasets
    datasets = {}        # name -> h5py dataset
    datasets_index = {}  # name -> next write index along axis 0
    # we need these bits of meta data before being able to assemble and transmit signal display data
    meta_required = ['n_chans','n_bls','n_xengs','center_freq','bls_ordering','bandwidth']
    meta_desired = ['n_accs']
    meta = {}
    sd_frame = None   # (n_chans, n_bls, 2) float32 frame under assembly
    sd_slots = None   # per-X-engine last-seen timestamp (0 = nothing yet)
    timestamp = None
    # log the latest timestamp for which we've stored data
    currentTimestamp = -1
    # iterate through SPEAD heaps returned by the SPEAD receiver.
    for heap in spead.iterheaps(rx):
        ig.update(heap)
        logger.debug("PROCESSING HEAP idx(%i) cnt(%i) @ %.4f" % (idx, heap.heap_cnt, time.time()))
        for name in ig.keys():
            item = ig.get_item(name)
            # the item is not marked as changed and we already have a record for it, continue
            if not item._changed and datasets.has_key(name):
                continue
            logger.debug("PROCESSING KEY %s @ %.4f" % (name, time.time()))
            if name in meta_desired:
                meta[name] = ig[name]
            if name in meta_required:
                meta[name] = ig[name]
                meta_required.pop(meta_required.index(name))
                if len(meta_required) == 0:
                    # All required metadata present: allocate the SD frame and
                    # forward the metadata descriptors to the signal display.
                    sd_frame = np.zeros((meta['n_chans'],meta['n_bls'],2),dtype=np.float32)
                    logger.info("Got all required metadata. Initialised sd frame to shape %s"%(str(sd_frame.shape)))
                    # NOTE(review): the list is rebuilt here so the meta items
                    # can be re-sent below; required-meta tracking restarts.
                    meta_required = ['n_chans','bandwidth','n_bls','n_xengs','center_freq','bls_ordering']
                    ig_sd = spead.ItemGroup()
                    for meta_item in meta_required:
                        ig_sd.add_item(
                            name=ig.get_item(meta_item).name,
                            id=ig.get_item(meta_item).id,
                            description=ig.get_item(meta_item).description,
                            #shape=ig.get_item(meta_item).shape,
                            #fmt=ig.get_item(meta_item).format,
                            init_val=ig.get_item(meta_item).get_value())
                    tx_sd.send_heap(ig_sd.get_heap())
                    sd_slots = np.zeros(meta['n_xengs'])
            if not datasets.has_key(name):
                # check to see if we have encountered this type before
                shape = ig[name].shape if item.shape == -1 else item.shape
                dtype = np.dtype(type(ig[name])) if shape == [] else item.dtype
                if dtype is None:
                    dtype = ig[name].dtype
                    # if we can't get a dtype from the descriptor, try and get one from the value
                logger.info("Creating dataset for %s (%s,%s)."%(str(name),str(shape),str(dtype)))
                # Growable dataset: axis 0 is the dump index (maxshape None).
                f.create_dataset(name,[1] + ([] if list(shape) == [1] else list(shape)),
                                 maxshape=[None] + ([] if list(shape) == [1] else list(shape)),
                                 dtype=dtype)
                dump_size += np.multiply.reduce(shape) * dtype.itemsize
                datasets[name] = f[name]
                datasets_index[name] = 0
                # if we built from an empty descriptor
                if not item._changed:
                    continue
            else:
                logger.info("Adding %s to dataset. New size is %i."%(name,datasets_index[name]+1))
                f[name].resize(datasets_index[name]+1, axis=0)
            # now we store this x engine's data for sending sd data.
            if sd_frame is not None and name.startswith("xeng_raw"):
                # Item names are "xeng_raw<N>"; slice N strides into the frame.
                xeng_id = int(name[8:])
                sd_frame[xeng_id::meta['n_xengs']] = ig[name]
                logger.debug('Received data for Xeng %i @ %.4f' % (xeng_id, time.time()))
            # we got a timestamp.
            if sd_frame is not None and name.startswith("timestamp"):
                xeng_id = int(name[9:])
                timestamp = ig['sync_time'] + (ig[name] / ig['scale_factor_timestamp']) #in seconds since unix epoch
                localTime = time.time()
                print "Decoded timestamp for Xeng", xeng_id, ":", timestamp, " (", time.ctime(timestamp),") @ %.4f" % localTime, " ", time.ctime(localTime), "diff(", localTime-timestamp, ")"
                # is this timestamp in the past?
                if currentTimestamp > timestamp:
                    errorString = "Timestamp %.2f (%s) is earlier than the current timestamp %.2f (%s). Ignoring..." % (timestamp, time.ctime(timestamp), currentTimestamp, time.ctime(currentTimestamp))
                    logger.warning(errorString)
                    continue
                # is this a new timestamp before a complete set?
                if (timestamp > currentTimestamp) and sd_slots.any():
                    errorString = "New timestamp %.2f from Xeng%i before previous set %.2f sent" % (timestamp, xeng_id, currentTimestamp)
                    logger.warning(errorString)
                    # Discard the partial frame and start over at this timestamp.
                    sd_slots = np.zeros(meta['n_xengs'])
                    sd_frame = np.zeros((ig['n_chans'],ig['n_bls'],2),dtype=sd_frame.dtype)
                    currentTimestamp = -1
                    continue
                # is this new timestamp in the past for this X engine?
                if timestamp <= sd_slots[xeng_id]:
                    errorString = 'Xeng%i already on timestamp %.2f but got %.2f now, THIS SHOULD NOT HAPPEN' % (xeng_id, sd_slots[xeng_id], timestamp)
                    logger.error(errorString)
                    raise RuntimeError(errorString)
                # update our info on which integrations we have
                sd_slots[xeng_id] = timestamp
                currentTimestamp = timestamp
            # do we have integration data and timestamps for all the xengines? If so, send the SD frame.
            if timestamp is not None and sd_frame is not None and sd_slots is not None and sd_slots.all():
                ig_sd = spead.ItemGroup()
                # make sure we have the right dtype for the sd data
                ig_sd.add_item(name=('sd_data'), id=(0x3501), description="Combined raw data from all x engines.",
                               ndarray=(sd_frame.dtype,sd_frame.shape))
                ig_sd.add_item(name=('sd_timestamp'), id=0x3502,
                               description='Timestamp of this sd frame in centiseconds since epoch (40 bit limitation).',
                               shape=[], fmt=spead.mkfmt(('u',spead.ADDRSIZE)))
                t_it = ig_sd.get_item('sd_data')
                logger.info("Added SD frame with shape %s, dtype %s" % (str(t_it.shape),str(t_it.dtype)))
                scale_factor=(meta['n_accs'] if meta.has_key('n_accs') else 1)
                logger.info("Sending signal display frame with timestamp %i (%s). %s. @ %.4f" % (timestamp, time.ctime(timestamp), "Unscaled" if not acc_scale else "Scaled by %i" % (scale_factor), time.time()))
                ig_sd['sd_data'] = sd_frame.astype(np.float32) if not acc_scale else (sd_frame / float(scale_factor)).astype(np.float32)
                ig_sd['sd_timestamp'] = int(timestamp * 100)
                tx_sd.send_heap(ig_sd.get_heap())
                # reset the arrays that hold integration data
                sd_slots = np.zeros(meta['n_xengs'])
                sd_frame = np.zeros((ig['n_chans'],ig['n_bls'],2),dtype=sd_frame.dtype)
                timestamp = None
            # Archive the item's current value and advance its write index.
            f[name][datasets_index[name]] = ig[name]
            datasets_index[name] += 1
            item._changed = False
        idx+=1
    logger.info("Got a SPEAD end-of-stream marker. Closing File.")
    f.flush()
    f.close()
    rx.stop()
    sd_frame = None
    sd_slots = None
    ig_sd = None
def spead_issue_meta(self):
    """
    All FxCorrelators issued SPEAD in the same way, with tweakings that are
    implemented by the child class.

    Builds a fresh SPEAD transmitter to ``self.meta_destination`` (with a
    multicast TTL of 2), populates ``self.spead_meta_ig`` with the full set
    of correlator metadata items (0x1007-0x1800), and sends the resulting
    heap.  Returns early without sending if ``self.meta_destination`` is
    still unset.
    :return: <nothing>
    """
    if self.meta_destination is None:
        logging.info('SPEAD meta destination is still unset, NOT sending '
                     'metadata at this time.')
        return
    # make a new SPEAD transmitter
    del self.spead_tx, self.spead_meta_ig
    self.spead_tx = spead.Transmitter(spead.TransportUDPtx(*self.meta_destination))
    # update the multicast socket option to use a TTL of 2,
    # in order to traverse the L3 network on site.
    ttl_bin = struct.pack('@i', 2)
    self.spead_tx.t._udp_out.setsockopt(socket.IPPROTO_IP,
                                        socket.IP_MULTICAST_TTL, ttl_bin)
    #mcast_interface = self.configd['xengine']['multicast_interface_address']
    #self.spead_tx.t._udp_out.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF,
    #                                    socket.inet_aton(mcast_interface))
    # self.spead_tx.t._udp_out.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP,
    #                                     socket.inet_aton(txip_str) + socket.inet_aton(mcast_interface))
    # make the item group we're going to use
    self.spead_meta_ig = spead.ItemGroup()
    self.spead_meta_ig.add_item(name='adc_sample_rate', id=0x1007,
                                description='The expected ADC sample rate (samples '
                                            'per second) of incoming data.',
                                shape=[], fmt=spead.mkfmt(('u', 64)),
                                init_val=self.sample_rate_hz)
    self.spead_meta_ig.add_item(name='n_bls', id=0x1008,
                                description='Number of baselines in the data product.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=len(self.xops.get_baseline_order()))
    self.spead_meta_ig.add_item(name='n_chans', id=0x1009,
                                description='Number of frequency channels in '
                                            'an integration.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=self.n_chans)
    self.spead_meta_ig.add_item(name='n_ants', id=0x100A,
                                description='The number of antennas in the system.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=self.n_antennas)
    self.spead_meta_ig.add_item(name='n_xengs', id=0x100B,
                                description='The number of x-engines in the system.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=(len(self.xhosts) * self.x_per_fpga))
    self.spead_meta_ig.add_item(name='bls_ordering', id=0x100C,
                                description='The baseline ordering in the output '
                                            'data product.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=numpy.array(
                                    [baseline for baseline in self.xops.get_baseline_order()]))
    # spead_ig.add_item(name='crosspol_ordering', id=0x100D,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    metalist = numpy.array(self._spead_meta_get_labelling())
    self.spead_meta_ig.add_item(name='input_labelling', id=0x100E,
                                description='input labels and numbers',
                                init_val=metalist)
    # spead_ig.add_item(name='n_bengs', id=0x100F,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    # NOTE(review): the two items below declare a float format but are
    # initialised with int() values — presumably intentional; confirm.
    self.spead_meta_ig.add_item(name='center_freq', id=0x1011,
                                description='The on-sky centre-frequency.',
                                shape=[], fmt=spead.mkfmt(('f', 64)),
                                init_val=int(self.configd['fengine']['true_cf']))
    self.spead_meta_ig.add_item(name='bandwidth', id=0x1013,
                                description='The input (analogue) bandwidth of '
                                            'the system.',
                                shape=[], fmt=spead.mkfmt(('f', 64)),
                                init_val=int(self.configd['fengine']['bandwidth']))
    self.spead_meta_ig.add_item(name='n_accs', id=0x1015,
                                description='The number of spectra that are '
                                            'accumulated per X-engine dump.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=self.accumulation_len * self.xeng_accumulation_len)
    self.spead_meta_ig.add_item(name='int_time', id=0x1016,
                                description='The time per integration, in seconds.',
                                shape=[], fmt=spead.mkfmt(('f', 64)),
                                init_val=self.xops.get_acc_time())
    # spead_ig.add_item(name='coarse_chans', id=0x1017,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    #
    # spead_ig.add_item(name='current_coarse_chan', id=0x1018,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    #
    # spead_ig.add_item(name='fft_shift_fine', id=0x101C,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    #
    # spead_ig.add_item(name='fft_shift_coarse', id=0x101D,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    self.spead_meta_ig.add_item(name='fft_shift', id=0x101E,
                                description='The FFT bitshift pattern. F-engine correlator internals.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=int(self.configd['fengine']['fft_shift']))
    self.spead_meta_ig.add_item(name='xeng_acc_len', id=0x101F,
                                description='Number of spectra accumulated inside X engine. '
                                            'Determines minimum integration time and '
                                            'user-configurable integration time stepsize. '
                                            'X-engine correlator internals.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=self.xeng_accumulation_len)
    # quant_format is "<bits>.<fraction>"; keep only the integer bit count.
    quant_bits = int(self.configd['fengine']['quant_format'].split('.')[0])
    self.spead_meta_ig.add_item(name='requant_bits', id=0x1020,
                                description='Number of bits after requantisation in the '
                                            'F engines (post FFT and any '
                                            'phasing stages).',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=quant_bits)
    pkt_len = int(self.configd['fengine']['10gbe_pkt_len'])
    self.spead_meta_ig.add_item(name='feng_pkt_len', id=0x1021,
                                description='Payload size of 10GbE packet exchange between '
                                            'F and X engines in 64 bit words. Usually equal '
                                            'to the number of spectra accumulated inside X '
                                            'engine. F-engine correlator internals.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=pkt_len)
    # port = int(self.configd['xengine']['output_destination_port'])
    # spead_ig.add_item(name='rx_udp_port', id=0x1022,
    #                   description='Destination UDP port for X engine output.',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=port)
    port = int(self.configd['fengine']['10gbe_port'])
    self.spead_meta_ig.add_item(name='feng_udp_port', id=0x1023,
                                description='Port for F-engines 10Gbe links in the system.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=port)
    # ip = self.configd['xengine']['output_destination_ip']
    # spead_ig.add_item(name='rx_udp_ip_str', id=0x1024,
    #                   description='Destination UDP IP for X engine output.',
    #                   shape=[-1], fmt=spead.STR_FMT,
    #                   init_val=ip)
    # Pack the dotted-quad start IP into a big-endian 32-bit integer.
    ip = struct.unpack('>I', socket.inet_aton(self.configd['fengine']['10gbe_start_ip']))[0]
    self.spead_meta_ig.add_item(name='feng_start_ip', id=0x1025,
                                description='Start IP address for F-engines in the system.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=ip)
    self.spead_meta_ig.add_item(name='xeng_rate', id=0x1026,
                                description='Target clock rate of processing engines (xeng).',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=self.xeng_clk)
    self.spead_meta_ig.add_item(name='sync_time', id=0x1027,
                                description='The time at which the digitisers were synchronised. '
                                            'Seconds since the Unix Epoch.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=self.synchronisation_epoch)
    # spead_ig.add_item(name='n_stokes', id=0x1040,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    x_per_fpga = int(self.configd['xengine']['x_per_fpga'])
    self.spead_meta_ig.add_item(name='x_per_fpga', id=0x1041,
                                description='Number of X engines per FPGA host.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=x_per_fpga)
    # n_ants_per_xaui = 1
    # spead_ig.add_item(name='n_ants_per_xaui', id=0x1042,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=n_ants_per_xaui)
    # spead_ig.add_item(name='ddc_mix_freq', id=0x1043,
    #                   description='',
    #                   shape=[],fmt=spead.mkfmt(('f', 64)),
    #                   init_val=)
    # spead_ig.add_item(name='ddc_bandwidth', id=0x1044,
    #                   description='',
    #                   shape=[],fmt=spead.mkfmt(('f', 64)),
    #                   init_val=)
    sample_bits = int(self.configd['fengine']['sample_bits'])
    self.spead_meta_ig.add_item(name='adc_bits', id=0x1045,
                                description='How many bits per ADC sample.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=sample_bits)
    self.spead_meta_ig.add_item(name='scale_factor_timestamp', id=0x1046,
                                description='Timestamp scaling factor. Divide the SPEAD '
                                            'data packet timestamp by this number to get '
                                            'back to seconds since last sync.',
                                shape=[], fmt=spead.mkfmt(('f', 64)),
                                init_val=self.sample_rate_hz)
    self.spead_meta_ig.add_item(name='xeng_out_bits_per_sample', id=0x1048,
                                description='The number of bits per value of the xeng '
                                            'accumulator output. Note this is for a '
                                            'single value, not the combined complex size.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=self.xeng_outbits)
    self.spead_meta_ig.add_item(name='f_per_fpga', id=0x1049,
                                description='Number of F engines per FPGA host.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
                                init_val=self.f_per_fpga)
    # spead_ig.add_item(name='rf_gain_MyAntStr ', id=0x1200+inputN,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('f', 64)),
    #                   init_val=)
    # 0x1400 +++
    self.fops.eq_update_metadata()
    # spead_ig.add_item(name='eq_coef_MyAntStr', id=0x1400+inputN,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', 32)),
    #                   init_val=)
    # ndarray = numpy.dtype(numpy.int64), (4096 * 40 * 1, 1, 1)
    self.spead_meta_ig.add_item(name='timestamp', id=0x1600,
                                description='Timestamp of start of this integration. uint counting multiples '
                                            'of ADC samples since last sync (sync_time, id=0x1027). Divide this '
                                            'number by timestamp_scale (id=0x1046) to get back to seconds since '
                                            'last sync when this integration was actually started.',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)))
    self.spead_meta_ig.add_item(name='flags_xeng_raw', id=0x1601,
                                description='Flags associated with xeng_raw data output.'
                                            'bit 34 - corruption or data missing during integration '
                                            'bit 33 - overrange in data path '
                                            'bit 32 - noise diode on during integration '
                                            'bits 0 - 31 reserved for internal debugging',
                                shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)))
    ndarray = numpy.dtype(numpy.int32), (self.n_chans, len(self.xops.get_baseline_order()), 2)
    # NOTE(review): len(self.xhosts * self.x_per_fpga) replicates the list
    # before taking len(); numerically equal to len(xhosts) * x_per_fpga.
    self.spead_meta_ig.add_item(name='xeng_raw', id=0x1800,
                                description='Raw data for %i xengines in the system. This item represents a '
                                            'full spectrum (all frequency channels) assembled from lowest '
                                            'frequency to highest frequency. Each frequency channel contains '
                                            'the data for all baselines (n_bls given by SPEAD ID 0x100b). '
                                            'Each value is a complex number -- two (real and imaginary) '
                                            'unsigned integers.' % len(self.xhosts * self.x_per_fpga),
                                ndarray=ndarray)
    # TODO hard-coded !!!!!!! :(
    # self.spead_ig.add_item(name=("xeng_raw"),id=0x1800,
    #                        description="Raw data for %i xengines in the system. This
    #                        item represents a full spectrum (all frequency channels) assembled from lowest frequency
    #                        to highest frequency. Each frequency channel contains the data for all baselines
    #                        (n_bls given by SPEAD ID 0x100B). Each value is a complex number -- two
    #                        (real and imaginary) unsigned integers."%(32),
    #                        ndarray=(numpy.dtype(numpy.int32),(4096,((4*(4+1))/2)*4,2)))
    # spead_ig.add_item(name='beamweight_MyAntStr', id=0x2000+inputN,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', 32)),
    #                   init_val=)
    # spead_ig.add_item(name='incoherent_sum', id=0x3000,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', 32)),
    #                   init_val=)
    # spead_ig.add_item(name='n_inputs', id=0x3100,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    # spead_ig.add_item(name='digitiser_id', id=0x3101,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    # spead_ig.add_item(name='digitiser_status', id=0x3102,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    # spead_ig.add_item(name='pld_len', id=0x3103,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    # spead_ig.add_item(name='raw_data_MyAntStr', id=0x3300+inputN,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    # spead_ig.add_item(name='Reserved for SP-CAM meta-data', id=0x7000-0x7fff,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    # spead_ig.add_item(name='feng_id', id=0xf101,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    # spead_ig.add_item(name='feng_status', id=0xf102,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    # spead_ig.add_item(name='frequency', id=0xf103,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    # spead_ig.add_item(name='raw_freq_MyAntStr', id=0xf300+inputN,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    # spead_ig.add_item(name='bf_MyBeamName', id=0xb000+beamN,
    #                   description='',
    #                   shape=[], fmt=spead.mkfmt(('u', spead.ADDRSIZE)),
    #                   init_val=)
    self.spead_tx.send_heap(self.spead_meta_ig.get_heap())
    self.logger.info('Issued SPEAD data descriptor to %s:%i.' %
                     (self.meta_destination[0], self.meta_destination[1]))
def rx_inter(self,data_port=7148, sd_ip='127.0.0.1', sd_port=7149,
             acc_scale=True, filename=None, **kwargs):
    '''
    Process SPEAD data from X engines and forward it to the SD.

    Receives heaps on `data_port`, archives every changed item into an HDF5
    file (`filename`, or "<epoch>.synth.h5" if None), assembles per-X-engine
    "xeng_raw*" slices into a combined signal-display frame, and retransmits
    that frame to `sd_ip`:`sd_port` once data for all X engines at a common
    timestamp has arrived.  `acc_scale` divides the SD frame by n_accs before
    sending.  Blocks until the SPEAD stream ends.
    '''
    print 'WARNING: This function is not yet tested. YMMV.'
    logger=self.logger
    logger.info("Data reception on port %i."%data_port)
    rx = spead.TransportUDPrx(data_port, pkt_count=1024, buffer_size=51200000)
    logger.info("Sending Signal Display data to %s:%i."%(sd_ip,sd_port))
    tx_sd = spead.Transmitter(spead.TransportUDPtx(sd_ip, sd_port))
    ig = spead.ItemGroup()
    ig_sd = spead.ItemGroup()
    if filename == None:
        filename=str(int(time.time())) + ".synth.h5"
    logger.info("Starting file %s."%(filename))
    f = h5py.File(filename, mode="w")
    data_ds = None
    ts_ds = None
    idx = 0          # number of heaps processed so far
    dump_size = 0    # running total of bytes per dump, for all datasets
    datasets = {}        # name -> h5py dataset
    datasets_index = {}  # name -> next write index along axis 0
    # we need these bits of meta data before being able to assemble and transmit signal display data
    meta_required = ['n_chans','n_bls','n_xengs','center_freq','bls_ordering','bandwidth']
    meta_desired = ['n_accs']
    meta = {}
    sd_frame = None   # (n_chans, n_bls, 2) float32 frame under assembly
    sd_slots = None   # per-X-engine last-seen timestamp (0 = nothing yet)
    timestamp = None
    # log the latest timestamp for which we've stored data
    currentTimestamp = -1
    # iterate through SPEAD heaps returned by the SPEAD receiver.
    for heap in spead.iterheaps(rx):
        ig.update(heap)
        logger.debug("PROCESSING HEAP idx(%i) cnt(%i) @ %.4f" % (idx, heap.heap_cnt, time.time()))
        for name in ig.keys():
            item = ig.get_item(name)
            # the item is not marked as changed and we already have a record for it, continue
            if not item._changed and datasets.has_key(name):
                continue
            logger.debug("PROCESSING KEY %s @ %.4f" % (name, time.time()))
            if name in meta_desired:
                meta[name] = ig[name]
            if name in meta_required:
                meta[name] = ig[name]
                meta_required.pop(meta_required.index(name))
                if len(meta_required) == 0:
                    # All required metadata present: allocate the SD frame and
                    # forward the metadata descriptors to the signal display.
                    sd_frame = np.zeros((meta['n_chans'],meta['n_bls'],2),dtype=np.float32)
                    logger.info("Got all required metadata. Initialised sd frame to shape %s"%(str(sd_frame.shape)))
                    # NOTE(review): the list is rebuilt here so the meta items
                    # can be re-sent below; required-meta tracking restarts.
                    meta_required = ['n_chans','bandwidth','n_bls','n_xengs','center_freq','bls_ordering']
                    ig_sd = spead.ItemGroup()
                    for meta_item in meta_required:
                        ig_sd.add_item(
                            name=ig.get_item(meta_item).name,
                            id=ig.get_item(meta_item).id,
                            description=ig.get_item(meta_item).description,
                            #shape=ig.get_item(meta_item).shape,
                            #fmt=ig.get_item(meta_item).format,
                            init_val=ig.get_item(meta_item).get_value())
                    tx_sd.send_heap(ig_sd.get_heap())
                    sd_slots = np.zeros(meta['n_xengs'])
            if not datasets.has_key(name):
                # check to see if we have encountered this type before
                shape = ig[name].shape if item.shape == -1 else item.shape
                dtype = np.dtype(type(ig[name])) if shape == [] else item.dtype
                if dtype is None:
                    dtype = ig[name].dtype
                    # if we can't get a dtype from the descriptor, try and get one from the value
                logger.info("Creating dataset for %s (%s,%s)."%(str(name),str(shape),str(dtype)))
                # Growable dataset: axis 0 is the dump index (maxshape None).
                f.create_dataset(name,[1] + ([] if list(shape) == [1] else list(shape)),
                                 maxshape=[None] + ([] if list(shape) == [1] else list(shape)),
                                 dtype=dtype)
                dump_size += np.multiply.reduce(shape) * dtype.itemsize
                datasets[name] = f[name]
                datasets_index[name] = 0
                # if we built from an empty descriptor
                if not item._changed:
                    continue
            else:
                logger.info("Adding %s to dataset. New size is %i."%(name,datasets_index[name]+1))
                f[name].resize(datasets_index[name]+1, axis=0)
            # now we store this x engine's data for sending sd data.
            if sd_frame is not None and name.startswith("xeng_raw"):
                # Item names are "xeng_raw<N>"; slice N strides into the frame.
                xeng_id = int(name[8:])
                sd_frame[xeng_id::meta['n_xengs']] = ig[name]
                logger.debug('Received data for Xeng %i @ %.4f' % (xeng_id, time.time()))
            # we got a timestamp.
            if sd_frame is not None and name.startswith("timestamp"):
                xeng_id = int(name[9:])
                timestamp = ig['sync_time'] + (ig[name] / ig['scale_factor_timestamp']) #in seconds since unix epoch
                localTime = time.time()
                print "Decoded timestamp for Xeng", xeng_id, ":", timestamp, " (", time.ctime(timestamp),") @ %.4f" % localTime, " ", time.ctime(localTime), "diff(", localTime-timestamp, ")"
                # is this timestamp in the past?
                if currentTimestamp > timestamp:
                    errorString = "Timestamp %.2f (%s) is earlier than the current timestamp %.2f (%s). Ignoring..." % (timestamp, time.ctime(timestamp), currentTimestamp, time.ctime(currentTimestamp))
                    logger.warning(errorString)
                    continue
                # is this a new timestamp before a complete set?
                if (timestamp > currentTimestamp) and sd_slots.any():
                    errorString = "New timestamp %.2f from Xeng%i before previous set %.2f sent" % (timestamp, xeng_id, currentTimestamp)
                    logger.warning(errorString)
                    # Discard the partial frame and start over at this timestamp.
                    sd_slots = np.zeros(meta['n_xengs'])
                    sd_frame = np.zeros((ig['n_chans'],ig['n_bls'],2),dtype=sd_frame.dtype)
                    currentTimestamp = -1
                    continue
                # is this new timestamp in the past for this X engine?
                if timestamp <= sd_slots[xeng_id]:
                    errorString = 'Xeng%i already on timestamp %.2f but got %.2f now, THIS SHOULD NOT HAPPEN' % (xeng_id, sd_slots[xeng_id], timestamp)
                    logger.error(errorString)
                    raise RuntimeError(errorString)
                # update our info on which integrations we have
                sd_slots[xeng_id] = timestamp
                currentTimestamp = timestamp
            # do we have integration data and timestamps for all the xengines? If so, send the SD frame.
            if timestamp is not None and sd_frame is not None and sd_slots is not None and sd_slots.all():
                ig_sd = spead.ItemGroup()
                # make sure we have the right dtype for the sd data
                ig_sd.add_item(name=('sd_data'), id=(0x3501), description="Combined raw data from all x engines.",
                               ndarray=(sd_frame.dtype,sd_frame.shape))
                ig_sd.add_item(name=('sd_timestamp'), id=0x3502,
                               description='Timestamp of this sd frame in centiseconds since epoch (40 bit limitation).',
                               shape=[], fmt=spead.mkfmt(('u',spead.ADDRSIZE)))
                t_it = ig_sd.get_item('sd_data')
                logger.info("Added SD frame with shape %s, dtype %s" % (str(t_it.shape),str(t_it.dtype)))
                scale_factor=(meta['n_accs'] if meta.has_key('n_accs') else 1)
                logger.info("Sending signal display frame with timestamp %i (%s). %s. @ %.4f" % (timestamp, time.ctime(timestamp), "Unscaled" if not acc_scale else "Scaled by %i" % (scale_factor), time.time()))
                ig_sd['sd_data'] = sd_frame.astype(np.float32) if not acc_scale else (sd_frame / float(scale_factor)).astype(np.float32)
                ig_sd['sd_timestamp'] = int(timestamp * 100)
                tx_sd.send_heap(ig_sd.get_heap())
                # reset the arrays that hold integration data
                sd_slots = np.zeros(meta['n_xengs'])
                sd_frame = np.zeros((ig['n_chans'],ig['n_bls'],2),dtype=sd_frame.dtype)
                timestamp = None
            # Archive the item's current value and advance its write index.
            f[name][datasets_index[name]] = ig[name]
            datasets_index[name] += 1
            item._changed = False
        idx+=1
    logger.info("Got a SPEAD end-of-stream marker. Closing File.")
    f.flush()
    f.close()
    rx.stop()
    sd_frame = None
    sd_slots = None
    ig_sd = None