def test_004_boost_time(self):
    """SOB/EOB burst extraction with a fractional (boost-style) start time.

    A single SOB/EOB pair brackets 8*31 samples starting at offset 34;
    the emitted PDU's "burst_time" metadata must equal start_time plus
    the tag offset scaled by the 512 kHz sample rate.
    """
    self.tb = gr.top_block()
    start_time = 0.1
    sob_tag = gr.tag_utils.python_to_tag(
        (34, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag(
        (34 + (8 * 31), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(350), False, 1, [sob_tag, eob_tag])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'),
                                  1024, 512000, ([]), False, 0, start_time)
    t2p.enable_time_debug(True)
    t2p.set_eob_parameters(8, 0)
    dbg = blocks.message_debug()
    #td = pdu_utils.time_delta("TIME CHECKER")
    #td = timing_utils.time_delta("TIME CHECKER")
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    #self.tb.msg_connect((t2p, 'pdu_out'), (td, 'pdu_in'))
    # burst payload spans samples [34, 34 + 8*31)
    expected_vec = pmt.init_s16vector((8 * 31), range(34, 34 + (8 * 31)))
    expected_time = start_time + (34 / 512000.0)
    ts = time.time()
    self.tb.run()
    self.assertEqual(dbg.num_messages(), 1)
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)),
                               pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(
        pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0)) +
        pmt.to_double(pmt.tuple_ref(time_tuple1, 1)),
        expected_time)
    #wct = pmt.to_double(pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("wall_clock_time"), pmt.PMT_NIL))
    #self.assertTrue((wct - ts) < 1.0)
    self.tb = None
def msg_handler(self, msg):
    """Cache decoded RDS fields and fire the callback when one changes.

    ``msg`` is a pmt tuple (type, text); the numeric type indexes
    ``rds_message_types`` to obtain a field name.  Alternative
    frequencies accumulate into a set; other fields are plain strings.
    """
    if pmt.is_tuple(msg):
        msg_type = pmt.to_long(pmt.tuple_ref(msg, 0))
        type_name = rds_message_types[msg_type]
        msg = pmt.symbol_to_string(pmt.tuple_ref(msg, 1))
        changed = False
        if type_name not in self.data:
            # We haven't seen this field before, add it to the cache
            if type_name == "alternative_frequencies":
                # There can be more than one alternative freq, so split
                self.data[type_name] = set(msg.split(', '))
            else:
                self.data[type_name] = msg
            changed = True
        else:
            # we know this field, let's check if it changed
            # Alternative frequencies change often, so we keep all of them
            # (perhaps we should filter those with kHz because they're AM)
            if type_name == "alternative_frequencies":
                for frequency in msg.split(', '):
                    if frequency not in self.data[type_name]:
                        # an alternative frequency we haven't seen yet
                        self.data[type_name].add(frequency)
                        changed = True
            elif self.data[type_name] != msg:
                # The check is much simpler for any other field
                changed = True
                self.data[type_name] = msg
        if self.callback is not None and changed:
            # fire callback if we have new data
            self.callback(type_name, self.data[type_name])
def test_003_double_eob_rej_tt_update(self):
    """Reject a duplicate EOB tag and honor an rx_time tag update.

    The second burst carries two EOB tags (offsets 409 and 416); only
    the first terminates the PDU, which is zero-padded to the 8-sample
    alignment.  Its start time must be derived from the rx_time tag at
    offset 360 (4.125 s) rather than from start_time.
    """
    self.tb = gr.top_block()
    start_time = 0.0
    sob_tag = gr.tag_utils.python_to_tag((51, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag((51 + (8 * 11), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    time_tuple = pmt.make_tuple(pmt.from_uint64(4), pmt.from_double(0.125),
                                pmt.from_uint64(10000000), pmt.from_double(4000000.0))
    time_tag = gr.tag_utils.python_to_tag((360, pmt.intern("rx_time"), time_tuple, pmt.intern("src")))
    sob_tag2 = gr.tag_utils.python_to_tag((400, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag2e = gr.tag_utils.python_to_tag((409, pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag2 = gr.tag_utils.python_to_tag((416, pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(500), False, 1,
                                [sob_tag, eob_tag, time_tag, sob_tag2, eob_tag2e, eob_tag2])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'),
                                  1024, 1000000, ([]), False, 0, start_time)
    t2p.set_eob_parameters(8, 0)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    expected_vec1 = pmt.init_s16vector((8 * 11), range(51, 51 + (8 * 11)))
    # second burst is zero-padded out to the 8-sample alignment
    expected_vec2 = pmt.init_s16vector(16, list(range(400, 409)) + [0] * 7)
    expected_time1 = start_time + (51 / 1000000.0)
    expected_time2 = 4.125 + ((400 - 360) / 1000000.0)
    self.tb.run()
    self.assertEqual(dbg.num_messages(), 2)
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec1))
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(1)), expected_vec2))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("burst_time"), pmt.PMT_NIL)
    time_tuple2 = pmt.dict_ref(pmt.car(dbg.get_message(1)), pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0)) +
                           pmt.to_double(pmt.tuple_ref(time_tuple1, 1)), expected_time1)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple2, 0)) +
                           pmt.to_double(pmt.tuple_ref(time_tuple2, 1)), expected_time2)
    self.tb = None
def handle_msg(self, msg):
    """Forward a decoded RDS (type, text) pmt tuple to the wx panel.

    Non-tuple messages are ignored.
    """
    if not pmt.is_tuple(msg):
        return
    msg_type = pmt.to_long(pmt.tuple_ref(msg, 0))
    text = pmt.symbol_to_string(pmt.tuple_ref(msg, 1))
    event = DataEvent([msg_type, text])
    wx.PostEvent(self.panel, event)
    del event
def parse_time_pmt(val, samples_per_second):
    """Get (sec, frac, idx) from an rx_time pmt value.

    Returns the whole seconds as int, the fractional seconds as float,
    and the absolute sample index corresponding to the timestamp.
    """
    tsec = np.uint64(pmt.to_uint64(pmt.tuple_ref(val, 0)))
    tfrac = pmt.to_double(pmt.tuple_ref(val, 1))
    # calculate sample index of time and floor to uint64
    tidx = np.uint64(tsec * samples_per_second + tfrac * samples_per_second)
    return int(tsec), tfrac, int(tidx)
def work(self, input_items, output_items):
    """Accumulate bit errors against the reference sequence, publish BER.

    Errors are XOR-counted against ``self.base.gen_n``.  When an rx_time
    tag is seen and bits have accumulated, a pmt dict with 'timestamp'
    and 'ber' is published on the BER port and the counters restart.
    """
    inb = input_items[0]
    linb = len(inb)
    gen = self.base.gen_n(linb)
    tags = self.get_tags_in_window(0, 0, linb, pmt.intern("rx_time"))
    if tags:
        # use the most recent time tag in this window as the BER timestamp
        tag = tags[-1]
        rx_time = tag.value
        seconds = pmt.to_uint64(pmt.tuple_ref(rx_time, 0))
        fractional_seconds = pmt.to_double(pmt.tuple_ref(rx_time, 1))
        timestamp = seconds + fractional_seconds
        if self.nbits > 0:
            ber = self.nerrs / float(self.nbits)
            #print "NBits: %d \tNErrs: %d \tBER: %.4E, \ttimestamp %f"%(int(self.nbits), int(self.nerrs), ber, timestamp)
            d = pmt.make_dict()
            d = pmt.dict_add(d, pmt.intern('timestamp'), pmt.from_double(timestamp))
            d = pmt.dict_add(d, pmt.intern('ber'), pmt.from_double(ber))
            self.message_port_pub(self.ber_port_id, d)
            # restart accumulation after each published measurement
            self.nerrs = 0
            self.nbits = 0
    self.nerrs += numpy.sum(numpy.bitwise_xor(inb, gen))
    self.nbits += len(inb)
    # if self.nbits > 0:
    #     print "NBits: %d \tNErrs: %d \tBER: %.4E"%(int(self.nbits), int(self.nerrs), self.nerrs/self.nbits)
    return len(inb)
def test_002_secondSOB(self):
    """A second SOB before any EOB restarts the burst at the new offset.

    Only the burst starting at the second SOB (offset 51) is emitted,
    and its burst_time reflects that offset.
    """
    self.tb = gr.top_block()
    start_time = 4.999999999
    sob_tag = gr.tag_utils.python_to_tag(
        (34, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    sob_tag2 = gr.tag_utils.python_to_tag(
        (51, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag(
        (51 + (8 * 26), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(350), False, 1,
                                [sob_tag, sob_tag2, eob_tag])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'),
                                  1024, 460800, ([]), False, 0, start_time)
    t2p.set_eob_parameters(8, 0)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    expected_vec = pmt.init_s16vector((8 * 26), range(51, 51 + (8 * 26)))
    expected_time = start_time + (51 / 460800.0)
    self.tb.run()
    self.assertEqual(dbg.num_messages(), 1)
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)),
                               pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(
        pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0)) +
        pmt.to_double(pmt.tuple_ref(time_tuple1, 1)),
        expected_time)
    self.tb = None
def handle_msg(self, msg):
    """Record the station ID from an RDS (type, text) pmt tuple.

    Only message type 0 (program information) is handled.
    NOTE(review): uses the Python 2 ``unicode`` builtin — this handler
    is not Python 3 compatible as written.
    """
    if(pmt.is_tuple(msg)):
        t = pmt.to_long(pmt.tuple_ref(msg, 0))
        m = pmt.symbol_to_string(pmt.tuple_ref(msg, 1))
        if (t == 0):
            #program information
            msg = unicode(m, errors='replace')
            self.stationID = msg
def handle_msg(self, msg):
    """Log each received RDS text message to songs.txt.

    ``msg`` is expected to be a pmt tuple of (type, text); any other
    message is ignored.
    """
    if not pmt.is_tuple(msg):
        return
    t = pmt.to_long(pmt.tuple_ref(msg, 0))
    m = pmt.symbol_to_string(pmt.tuple_ref(msg, 1))
    print("message:", m)
    # Bug fix: the original called f.write(de, "w") with a DataEvent
    # object plus a spurious mode argument, which raises TypeError.
    # Write the decoded text instead.  Open in append mode (and via a
    # context manager) so each message no longer truncates the file.
    with open("songs.txt", "a") as f:
        f.write(m + "\n")
def handle_msg(self, msg):
    """Decode an RDS (type, text) pmt tuple and re-emit it as a Qt signal.

    Non-tuple messages are ignored.
    """
    if not pmt.is_tuple(msg):
        return
    msg_type = pmt.to_long(pmt.tuple_ref(msg, 0))
    # use a fresh name instead of rebinding the parameter; the original
    # also contained a redundant self-assignment (msg = msg), removed here
    text = pmt.symbol_to_string(pmt.tuple_ref(msg, 1))
    self.msg_signal.emit(msg_type, text)
def work(self, input_items, output_items):
    """Pass samples through while detecting gaps via rx_time tags.

    On the first rx_time tag the sample rate and reference time are
    latched.  On later tags, the offset delta implied by the timestamps
    and rate is compared with the actual tag-offset delta; the
    difference is reported as the gap length in samples.
    NOTE(review): Python 2 print statements — not Python 3 compatible.
    """
    in0 = input_items[0]
    out = output_items[0]
    if self.got_fist_tag is not True:
        rx_rate_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_rate"))
        rx_time_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_time"))
        if len(rx_time_tags) > 0:
            self.got_fist_tag = True
            # self.rx_rate = mpf(pmt.to_double(rx_rate_tags[0].value))
            self.rx_rate = pmt.to_double(rx_rate_tags[0].value)
            self.offset_prev = rx_time_tags[0].offset
            # self.rx_time_prev_secs = mpf(pmt.to_uint64(pmt.tuple_ref(rx_time_tags[0].value, 0)))
            # self.rx_time_prev_frac = mpf(pmt.to_double(pmt.tuple_ref(rx_time_tags[0].value, 1)))
            self.rx_time_prev_secs = pmt.to_uint64(
                pmt.tuple_ref(rx_time_tags[0].value, 0))
            self.rx_time_prev_frac = pmt.to_double(
                pmt.tuple_ref(rx_time_tags[0].value, 1))
            if len(rx_time_tags) > 1:
                print "Usupported situation - more than one tag in a single work(..) call"
    else:
        rx_time_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_time"))
        if len(rx_time_tags) > 0:
            tt = rx_time_tags[0]
            # print "Offset:",tt.offset," Offset_prev:",self.offset_prev," wartosc:",tt.value
            #compute number of zeros to add
            # self.rx_time_secs = mpf(pmt.to_uint64(pmt.tuple_ref(tt.value, 0)))
            # self.rx_time_frac = mpf(pmt.to_double(pmt.tuple_ref(tt.value, 1)))
            self.rx_time_secs = pmt.to_uint64(pmt.tuple_ref(tt.value, 0))
            self.rx_time_frac = pmt.to_double(pmt.tuple_ref(tt.value, 1))
            self.offset = tt.offset
            diff_offset = self.offset - self.offset_prev
            # expected offset advance from the timestamps and sample rate
            diff_offset_real = (
                (self.rx_time_secs - self.rx_time_prev_secs) +
                (self.rx_time_frac - self.rx_time_prev_frac)) * self.rx_rate
            # print "self.rx_time_secs:",self.rx_time_secs,"self.rx_time_prev_frac:",self.rx_time_prev_frac
            zeros = diff_offset_real - diff_offset
            # print "diff_offset_real:",diff_offset_real,"diff_offset:",diff_offset
            print "Found a gap in the data at offset:", self.offset, " with length:", zeros, " [samps]"
            #save previous value
            self.offset_prev = self.offset
            self.rx_time_prev_secs = self.rx_time_secs
            self.rx_time_prev_frac = self.rx_time_frac
            if len(rx_time_tags) > 1:
                print "Usupported situation - more than one tag in a single work(..) call"
    out[:] = in0
    return len(output_items[0])
def handler(self, rds_data):
    """Route a decoded RDS (type, text) tuple to the matching GUI widget."""
    msg_type = pmt.to_long(pmt.tuple_ref(rds_data, 0))
    text = pmt.symbol_to_string(pmt.tuple_ref(rds_data, 1))
    if msg_type == 0:
        # program information is rendered as a station callsign
        self.program_info.setText(callsign(text))
    elif msg_type == 1:
        self.station_name.setText(text)
    elif msg_type == 3:
        self.flags.update(text)
    elif msg_type == 4:
        self.radio_text.setText(text.strip())
def test_001_t(self):
    """CW radar chain: simulate a 15 m/s target and check the estimate.

    Verifies the rx_time entry of the estimator message and that the
    estimated velocity is within ~1% of the simulated one.
    """
    # set up fg
    test_len = 1024
    packet_len = test_len
    samp_rate = 2000
    center_freq = 1e9
    velocity = 15
    src = radar.signal_generator_cw_c(packet_len, samp_rate, (0, 0), 1)
    head = blocks.head(8, test_len)
    sim = radar.static_target_simulator_cc(
        (10, 10), (velocity, velocity), (1e9, 1e9), (0, 0), (0, ),
        samp_rate, center_freq, 1, True, False)
    mult = blocks.multiply_cc()
    fft = radar.ts_fft_cc(packet_len)
    cfar = radar.os_cfar_c(samp_rate, 5, 0, 0.78, 10, True)
    est = radar.estimator_cw(center_freq)
    res = radar.print_results()
    debug = blocks.message_debug()
    self.tb.connect(src, head, (mult, 1))
    self.tb.connect(head, sim, (mult, 0))
    self.tb.connect(mult, fft, cfar)
    self.tb.msg_connect(cfar, 'Msg out', est, 'Msg in')
    self.tb.msg_connect(est, 'Msg out', res, 'Msg in')
    self.tb.msg_connect(est, 'Msg out', debug, 'store')
    #self.tb.msg_connect(est,'Msg out',debug,'print')
    self.tb.start()
    sleep(0.5)
    self.tb.stop()
    self.tb.wait()
    # check data
    msg = debug.get_message(0)
    self.assertEqual("rx_time", pmt.symbol_to_string(pmt.nth(0, (pmt.nth(
        0, msg)))))  # check rx_time message part (symbol)
    self.assertEqual(0, pmt.to_uint64(
        pmt.tuple_ref(pmt.nth(1, (pmt.nth(0, msg))), 0)))  # check rx_time value
    self.assertEqual(
        0.0, pmt.to_double(pmt.tuple_ref(pmt.nth(1, (pmt.nth(0, msg))), 1)))
    self.assertEqual(
        "velocity", pmt.symbol_to_string(pmt.nth(
            0, (pmt.nth(1, msg)))))  # check velocity message part (symbol)
    self.assertAlmostEqual(
        1, velocity / pmt.f32vector_ref(pmt.nth(1, (pmt.nth(1, msg))), 0),
        2)  # check velocity value
def work(self, input_items, output_items):
    """Pass samples through, track the time reference, and tag queued tunes.

    Queued tune commands are tagged at their requested offset when it
    falls in the current window, immediately when it has already
    elapsed (or is TAG_IMMEDIATELY), and otherwise block the queue
    until a later window.
    NOTE(review): Python 2 print statements — not Python 3 compatible.
    """
    with self.lock:
        # print "nitems_read = {}".format(self.nitems_read(0))
        in0 = input_items[0]
        out = output_items[0]
        noutput_items = len(input_items[0])
        nitems_read = self.nitems_read(0)
        out[:] = in0
        # look for time reference in tags
        tags = self.get_tags_in_window(0, 0, noutput_items, self.time_key);
        if len(tags):
            # use last tag in window to update reference time
            try:
                offset = tags[-1].offset
                sec = pmt.to_uint64(pmt.tuple_ref(tags[-1].value, 0))
                frac = pmt.to_double(pmt.tuple_ref(tags[-1].value, 1))
                self.set_ref_time(offset, sec, frac)
            except Exception as e:
                print "invalid tag value: ", repr(e)
                pass
        # if there is a tune that needs to be tagged
        while len(self.tune_commands):
            (tag_offset, tag_value) = self.tune_commands[0]
            tag = False
            if tag_offset is TAG_IMMEDIATELY:
                offset = nitems_read
                tag = True
            elif tag_offset < nitems_read:
                # time has already elapsed - tag immediatey
                offset = nitems_read
                tag = True
            elif nitems_read <= tag_offset < (nitems_read + noutput_items):
                # time within current window
                offset = tag_offset
                tag = True
            # tag it
            if tag:
                # print "n = {}, offset = {}, value = {}".format(len(self.tune_commands),offset,tag_value)
                self.add_item_tag(0, offset, self.tag_key, tag_value)
                self.tune_commands.popleft()
                # print "length now = {}".format(len(self.tune_commands))
            else:
                # block
                # print "breaking"
                break
        # print "noutput_items = {}".format(noutput_items)
        return noutput_items
def msg_handler(self, p):
    """Append selected fields of a measurement message to a CSV file.

    ``p`` is a pmt list of (key, value) pairs.  With ``self.key`` set
    to "all", every field is written and a header row is emitted once;
    otherwise only the configured keys are written.  rx_time tuples are
    folded into a single float; other values are taken from the first
    element of an f32 vector.
    """
    if self.filename != "":
        self.fdout = open(self.filename, "a")
        length = pmt.length(p)
        if self.key == "all":
            #if all keys are printed, they need however be printed once above
            if self.counter == 0:
                for i in range(0, length):
                    element = pmt.nth(i, p)
                    current_key = str(pmt.nth(0, element))
                    self.fdout.write(current_key + ",")
                self.fdout.write("\n")
                self.counter = 1
            #print all
            for i in range(0, length):
                element = pmt.nth(i, p)
                current_key = str(pmt.nth(0, element))
                current_value = pmt.nth(1, element)
                if current_key == "rx_time":
                    number = pmt.to_uint64(pmt.tuple_ref(current_value, 0)) + \
                        pmt.to_double(pmt.tuple_ref(current_value, 1))
                    self.fdout.write(str(number) + ",")
                else:
                    self.fdout.write(str(pmt.f32vector_elements(current_value)[0]) + ",")
        else:
            #print all values that correspond to keys
            for key in self.key:
                for i in range(0, length):
                    element = pmt.nth(i, p)
                    current_key = str(pmt.nth(0, element))
                    current_value = pmt.nth(1, element)
                    if current_key == key:
                        if key == "rx_time":
                            number = pmt.to_uint64(pmt.tuple_ref(current_value, 0)) + \
                                pmt.to_double(pmt.tuple_ref(current_value, 1))
                            self.fdout.write(str(number) + ",")
                        else:
                            self.fdout.write(str(pmt.f32vector_elements(current_value)[0]) + ",")
        self.fdout.write("\n")
        self.fdout.close()
def test_005_two_sobs_misaligned(self):
    """Second SOB supersedes the first; unaligned burst is zero-padded."""
    # Two SOB tags and the SOB-to-EOB length is not aligned
    self.tb = gr.top_block()
    start_time = 0.1
    sob_tag = gr.tag_utils.python_to_tag((34, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    sob_tag2 = gr.tag_utils.python_to_tag((35, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag((34 + (8 * 31), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(1350), False, 1, [sob_tag, sob_tag2, eob_tag])
    #vs = blocks.vector_source_s(range(350), False, 1, [sob_tag, eob_tag])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'),
                                  1024, 512000, ([]), False, 0, start_time)
    t2p.set_eob_parameters(8, 0)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    # burst starts at the second SOB (35) and is padded with one zero
    expected_vec = pmt.init_s16vector((8 * 31), list(range(35, 34 + (8 * 31))) + [0])
    expected_time = start_time + (35 / 512000.0)
    self.tb.run()
    self.assertEqual(dbg.num_messages(), 1)
    #print "got ", dbg.get_message(0)
    #print "expected", expected_vec
    #print "len is {}".format(len(pmt.to_python(pmt.cdr(dbg.get_message(0)))))
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0)) +
                           pmt.to_double(pmt.tuple_ref(time_tuple1, 1)), expected_time)
    self.tb = None
def test_007_max_pdu_size_SOBs(self):
    """A burst with no EOB is cut at max_pdu_size samples."""
    # two SOB tags exactly max_pdu_size samples apart
    self.tb = gr.top_block()
    start_time = 0.1
    max_size = 100
    sob_tag = gr.tag_utils.python_to_tag((10, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    sob_tag3 = gr.tag_utils.python_to_tag((10 + max_size, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(1350), False, 1, [sob_tag, sob_tag3])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'),
                                  1024, 512000, ([]), False, 0, start_time)
    t2p.set_eob_parameters(10, 0)
    t2p.set_max_pdu_size(max_size)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    expected_vec = pmt.init_s16vector((max_size), range(10, 10 + max_size))
    expected_time = start_time + (10 / 512000.0)
    self.tb.run()
    # assertions for the first PDU only, second PDU will exist
    self.assertEqual(dbg.num_messages(), 2)
    #print "got ", dbg.get_message(0)
    #print "expected", expected_vec
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0)) +
                           pmt.to_double(pmt.tuple_ref(time_tuple1, 1)), expected_time)
    self.tb = None
def test_003_every(self):
    """Tag every sample and validate each rx_time_offset tag.

    For every rx_time_offset tag: the rate field matches, the tagged
    stream offset equals the tuple's offset entry, offsets land on the
    tagging interval, and consecutive timestamps advance by
    interval/rate.
    """
    self.tb = gr.top_block()
    self.rate = 99999999999
    self.interval = 1
    self.duration = 4321
    self.src = blocks.vector_source_c(list(range(self.duration)), False, 1, [])
    self.utag = timing_utils.add_usrp_tags_c(1090e6, self.rate, 0, .98765)
    self.tags = timing_utils.tag_uhd_offset_c(self.rate, self.interval)
    self.tag_dbg = blocks.tag_debug(gr.sizeof_gr_complex * 1, "", "");
    self.tag_dbg.set_display(False)
    self.tb.connect((self.src, 0), (self.utag, 0))
    self.tb.connect((self.utag, 0), (self.tags, 0))
    self.tb.connect((self.tags, 0), (self.tag_dbg, 0))
    # one tag per interval plus the three tags added by add_usrp_tags
    e_n_tags = int(ceil(1.0 * self.duration / self.interval)) + 3
    self.tb.run()
    tags = self.tag_dbg.current_tags()
    tprev = None
    for t in tags:
        if pmt.eq(t.key, pmt.intern("rx_time_offset")):
            self.assertAlmostEqual(self.rate, pmt.to_double(pmt.tuple_ref(t.value, 3)), -4)
            self.assertEqual(t.offset, pmt.to_uint64(pmt.tuple_ref(t.value, 2)))
            self.assertTrue((pmt.to_uint64(pmt.tuple_ref(t.value, 2)) / (1.0 * self.interval)).is_integer())
            if tprev is not None:
                tcur = pmt.to_uint64(pmt.tuple_ref(t.value, 0)) + pmt.to_double(pmt.tuple_ref(t.value, 1))
                self.assertAlmostEqual(tcur - tprev, 1.0 * self.interval / self.rate)
                tprev = tcur
            else:
                tprev = pmt.to_uint64(pmt.tuple_ref(t.value, 0)) + pmt.to_double(pmt.tuple_ref(t.value, 1))
    self.assertEqual(self.tag_dbg.num_tags(), e_n_tags)
    self.tb = None
def handle_msg(self, msg, port):
    """Route an RDS (type, text) tuple into the station table.

    Each known message type maps to a fixed table column; the port
    number selects the row.  Unknown types are dropped.
    """
    msg_type = pmt.to_long(pmt.tuple_ref(msg, 0))
    text = pmt.symbol_to_string(pmt.tuple_ref(msg, 1))
    # RDS type -> column: PI, PS, alt. frequencies, clock/time, radiotext
    column_for_type = {0: 0, 1: 2, 6: 3, 5: 4, 4: 5}
    if msg_type in column_for_type:
        self.signals.DataUpdateEvent.emit({
            'col': column_for_type[msg_type],
            'row': port,
            'string': text
        })
def process_measurement(self, msg):
    """Convert a freq_offset measurement into a ppm correction message.

    Publishes on the "ppm" port: the raw ppm while searching for the
    FCCH (with a 0.5 s watchdog reset), a single-pole-filtered estimate
    on every 5th synchronized update when it moved by more than
    0.1 ppm, and 0.0 on sync loss.
    """
    if pmt.is_tuple(msg):
        key = pmt.symbol_to_string(pmt.tuple_ref(msg, 0))
        if key == "freq_offset":
            freq_offset = pmt.to_double(pmt.tuple_ref(msg, 1))
            ppm = -freq_offset / self.fc * 1.0e6
            state = pmt.symbol_to_string(pmt.tuple_ref(msg, 2))
            self.last_state = state
            if abs(ppm) > 100:
                #safeguard against flawed measurements
                ppm = 0
                self.reset()
            if state == "fcch_search":
                msg_ppm = pmt.from_double(ppm)
                self.message_port_pub(pmt.intern("ppm"), msg_ppm)
                self.timer.cancel()
                self.timer = Timer(0.5, self.timed_reset)
                self.timer.start()
            elif state == "synchronized":
                self.timer.cancel()
                if self.first_measurement:
                    self.ppm_estimate = ppm
                    self.first_measurement = False
                else:
                    # single-pole IIR average of the ppm estimate
                    self.ppm_estimate = (
                        1 - self.alfa) * self.ppm_estimate + self.alfa * ppm
                if self.counter == 5:
                    self.counter = 0
                    if abs(self.last_ppm_estimate - self.ppm_estimate) > 0.1:
                        msg_ppm = pmt.from_double(ppm)
                        self.message_port_pub(pmt.intern("ppm"), msg_ppm)
                        self.last_ppm_estimate = self.ppm_estimate
                else:
                    self.counter = self.counter + 1
            elif state == "sync_loss":
                self.reset()
                msg_ppm = pmt.from_double(0.0)
                self.message_port_pub(pmt.intern("ppm"), msg_ppm)
def handle_msg(self, msg):
    """Extract timestamp and phase from a measurement message.

    ``msg`` is a pmt list of (key, value) pairs; the first entry holds
    the rx_time (secs, frac) tuple and the last entry holds the phase
    f32 vector.  Results are stored on self.time / self.phase.
    """
    num_entries = pmt.length(msg)
    # first entry: rx_time as a (whole seconds, fractional seconds) tuple
    time_pair = pmt.nth(1, pmt.nth(0, msg))
    whole_secs = pmt.to_uint64(pmt.tuple_ref(time_pair, 0))
    frac_secs = pmt.to_double(pmt.tuple_ref(time_pair, 1))
    # last entry: phase value vector; only the first element is used
    phase_vec = pmt.nth(1, pmt.nth(num_entries - 1, msg))
    self.phase = pmt.f32vector_elements(phase_vec)[0]
    self.time = whole_secs + frac_secs
def handle_msg(self, msg, port):
    """Record PI/PS/RT strings per port and periodically dump the table.

    # NOTE(review): the dump countdown is assumed to decrement on every
    # message, not only on radiotext updates — confirm against original
    # formatting.
    """
    t = pmt.to_long(pmt.tuple_ref(msg, 0))
    m = pmt.symbol_to_string(pmt.tuple_ref(msg, 1))
    #code.interact(local=locals())
    if (t == 0):
        self.PI = m
        self.stations[str(port) + "PI"] = m
    elif (t == 1):
        self.PS = m
        self.stations[str(port) + "PS"] = m
    elif (t == 4):
        self.RT = m
        self.stations[str(port) + "RT"] = m
    # countdown to the next station-table dump
    self.print_count -= 1
    #print(self.stations)
    if (self.print_count == 0):
        self.print_count = self.print_freq
        print("########## stations ###########")
        for key in sorted(self.stations):
            print("%s: %s" % (key, self.stations[key]))
def process_measurement(self, msg):
    """Convert a freq_offset measurement into a ppm correction message.

    Same logic as the other process_measurement in this file: raw ppm
    during fcch_search (with a 0.5 s watchdog), a single-pole-filtered
    estimate on every 5th synchronized update when it moved by more
    than 0.1 ppm, and 0.0 on sync loss.
    """
    if pmt.is_tuple(msg):
        key = pmt.symbol_to_string(pmt.tuple_ref(msg, 0))
        if key == "freq_offset":
            freq_offset = pmt.to_double(pmt.tuple_ref(msg, 1))
            ppm = -freq_offset / self.fc * 1.0e6
            state = pmt.symbol_to_string(pmt.tuple_ref(msg, 2))
            self.last_state = state
            if abs(ppm) > 100:
                #safeguard against flawed measurements
                ppm = 0
                self.reset()
            if state == "fcch_search":
                msg_ppm = pmt.from_double(ppm)
                self.message_port_pub(pmt.intern("ppm"), msg_ppm)
                self.timer.cancel()
                self.timer = Timer(0.5, self.timed_reset)
                self.timer.start()
            elif state == "synchronized":
                self.timer.cancel()
                if self.first_measurement:
                    self.ppm_estimate = ppm
                    self.first_measurement = False
                else:
                    # single-pole IIR average of the ppm estimate
                    self.ppm_estimate = (1 - self.alfa) * self.ppm_estimate + self.alfa * ppm
                if self.counter == 5:
                    self.counter = 0
                    if abs(self.last_ppm_estimate - self.ppm_estimate) > 0.1:
                        msg_ppm = pmt.from_double(ppm)
                        self.message_port_pub(pmt.intern("ppm"), msg_ppm)
                        self.last_ppm_estimate = self.ppm_estimate
                else:
                    self.counter = self.counter + 1
            elif state == "sync_loss":
                self.reset()
                msg_ppm = pmt.from_double(0.0)
                self.message_port_pub(pmt.intern("ppm"), msg_ppm)
def test_001_t(self):
    """CW radar chain: simulate a 15 m/s target and check the estimate.

    Duplicate of the formatted variant earlier in this file: verifies
    the rx_time entry of the estimator message and that the estimated
    velocity is within ~1% of the simulated one.
    """
    # set up fg
    test_len = 1024
    packet_len = test_len
    samp_rate = 2000
    center_freq = 1e9
    velocity = 15
    src = radar.signal_generator_cw_c(packet_len, samp_rate, (0, 0), 1)
    head = blocks.head(8, test_len)
    sim = radar.static_target_simulator_cc(
        (10, 10), (velocity, velocity), (1e9, 1e9), (0, 0), (0, ),
        samp_rate, center_freq, 1, True, False)
    mult = blocks.multiply_cc()
    fft = radar.ts_fft_cc(packet_len)
    cfar = radar.os_cfar_c(samp_rate, 5, 0, 0.78, 10, True)
    est = radar.estimator_cw(center_freq)
    res = radar.print_results()
    debug = blocks.message_debug()
    self.tb.connect(src, head, (mult, 1))
    self.tb.connect(head, sim, (mult, 0))
    self.tb.connect(mult, fft, cfar)
    self.tb.msg_connect(cfar, 'Msg out', est, 'Msg in')
    self.tb.msg_connect(est, 'Msg out', res, 'Msg in')
    self.tb.msg_connect(est, 'Msg out', debug, 'store')
    #self.tb.msg_connect(est,'Msg out',debug,'print')
    self.tb.start()
    sleep(0.5)
    self.tb.stop()
    self.tb.wait()
    # check data
    msg = debug.get_message(0)
    self.assertEqual("rx_time", pmt.symbol_to_string(pmt.nth(0, (pmt.nth(0, msg)))))  # check rx_time message part (symbol)
    self.assertEqual(0, pmt.to_uint64(pmt.tuple_ref(pmt.nth(1, (pmt.nth(0, msg))), 0)))  # check rx_time value
    self.assertEqual(0.0, pmt.to_double(pmt.tuple_ref(pmt.nth(1, (pmt.nth(0, msg))), 1)))
    self.assertEqual("velocity", pmt.symbol_to_string(pmt.nth(0, (pmt.nth(1, msg)))))  # check velocity message part (symbol)
    self.assertAlmostEqual(1, velocity / pmt.f32vector_ref(pmt.nth(1, (pmt.nth(1, msg))), 0), 2)  # check velocity value
def test_006_max_pdu_size(self):
    """EOB within max size pads to alignment; a second burst follows."""
    # two SOB tags exactly max_pdu_size samples apart, with an
    # SOB-to-EOB length that is not divisible by the alignment size
    self.tb = gr.top_block()
    start_time = 0.1
    max_size = 100
    sob_tag = gr.tag_utils.python_to_tag(
        (10, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag(
        (91, pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    sob_tag3 = gr.tag_utils.python_to_tag(
        (11 + max_size, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(1350), False, 1,
                                [sob_tag, eob_tag, sob_tag3])
    t2p = pdu.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'),
                            1024, 512000, ([]), False, 0, start_time)
    t2p.set_eob_parameters(10, 0)
    t2p.set_max_pdu_size(max_size)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdus'), (dbg, 'store'))
    # 81 samples of payload padded with 9 zeros to the 10-sample alignment
    expected_vec = pmt.init_s16vector((9 * 10), list(range(10, 91)) + [0] * 9)
    expected_time = start_time + (10 / 512000.0)
    self.tb.run()
    # assertions for the first PDU only, second PDU will exist
    self.assertEqual(dbg.num_messages(), 2)
    #print "got ", dbg.get_message(0)
    #print "expected", expected_vec
    #print "len is {}".format(len(pmt.to_python(pmt.cdr(dbg.get_message(0)))))
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)),
                               pmt.intern("rx_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(
        pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0)) +
        pmt.to_double(pmt.tuple_ref(time_tuple1, 1)),
        expected_time)
    self.tb = None
def update_timestamp(hdr, seg_size):
    """Return a copy of hdr with rx_time advanced by seg_size samples.

    Requires the "rx_time" key (exits otherwise); the new timestamp is
    only written back when "rx_rate" is also present.
    NOTE(review): uses the Python 2 ``long`` builtin — not Python 3
    compatible as written.
    """
    if pmt.dict_has_key(hdr, pmt.string_to_symbol("rx_time")):
        r = pmt.dict_ref(hdr, pmt.string_to_symbol("rx_time"), pmt.PMT_NIL)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
    else:
        sys.stderr.write("Could not find key 'time': \
                invalid or corrupt data file.\n")
        sys.exit(1)
    new_hdr = pmt.dict_delete(hdr, pmt.intern("rx_time"))
    if pmt.dict_has_key(hdr, pmt.intern("rx_rate")):
        r = pmt.dict_ref(hdr, pmt.intern("rx_rate"), pmt.PMT_NIL)
        rate = pmt.to_double(r)
        new_t = t + float(seg_size) / rate
        new_secs = long(new_t)
        new_fracs = new_t - new_secs
        time_val = pmt.make_tuple(pmt.from_uint64(new_secs),
                                  pmt.from_double(new_fracs))
        new_hdr = pmt.dict_add(new_hdr, pmt.intern("rx_time"), time_val)
    return new_hdr
def update_timestamp(hdr, seg_size):
    """Return a copy of hdr with rx_time advanced by seg_size samples.

    Duplicate of the other update_timestamp in this file.  Requires the
    "rx_time" key (exits otherwise); the new timestamp is only written
    back when "rx_rate" is also present.
    NOTE(review): uses the Python 2 ``long`` builtin — not Python 3
    compatible as written.
    """
    if pmt.dict_has_key(hdr, pmt.string_to_symbol("rx_time")):
        r = pmt.dict_ref(hdr, pmt.string_to_symbol("rx_time"), pmt.PMT_NIL)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
    else:
        sys.stderr.write("Could not find key 'time': \
                invalid or corrupt data file.\n")
        sys.exit(1)
    new_hdr = pmt.dict_delete(hdr, pmt.intern("rx_time"))
    if pmt.dict_has_key(hdr, pmt.intern("rx_rate")):
        r = pmt.dict_ref(hdr, pmt.intern("rx_rate"), pmt.PMT_NIL)
        rate = pmt.to_double(r)
        new_t = t + float(seg_size) / rate
        new_secs = long(new_t)
        new_fracs = new_t - new_secs
        time_val = pmt.make_tuple(pmt.from_uint64(new_secs),
                                  pmt.from_double(new_fracs))
        new_hdr = pmt.dict_add(new_hdr, pmt.intern("rx_time"), time_val)
    return new_hdr
def test_001_t(self):
    """OFDM parameter estimation: recover FFT and CP length.

    Builds a synthetic OFDM signal with fft_len=256 and cp_len=32 and
    checks the analyzer message reports both values.
    """
    # set up fg
    fft_len = 256
    cp_len = 32
    samp_rate = 32000
    data = np.random.choice([-1, 1], [100, fft_len])
    timefreq = np.fft.ifft(data, axis=0)
    #add cp
    timefreq = np.hstack((timefreq[:, -cp_len:], timefreq))
    tx = np.reshape(timefreq, (1, -1))
    # GR time!
    src = blocks.vector_source_c(tx[0].tolist(), True, 1, [])
    analyzer = ofdm_param_estim.ofdm_param_estimation_c(
        samp_rate, 0, 7000, [128, 256, 512, 1024], [8, 16, 32, 64])
    snk = blocks.message_debug()
    # connect
    self.tb.connect(src, analyzer)
    self.tb.msg_connect((analyzer, 'ofdm_out'), (snk, 'store'))
    self.tb.start()
    time.sleep(0.25)
    self.tb.stop()
    self.tb.wait()
    # check data
    result = snk.get_message(0)
    # message entries 4 and 5 carry (key, value) pairs for FFT/CP length
    fft_result = pmt.to_float(pmt.tuple_ref(pmt.tuple_ref(result, 4), 1))
    cp_result = pmt.to_float(pmt.tuple_ref(pmt.tuple_ref(result, 5), 1))
    self.assertAlmostEqual(fft_len, fft_result)
    self.assertAlmostEqual(cp_len, cp_result)
def test_001_t(self):
    """OFDM autocorrelation analyzer: recover FFT and CP length.

    Same synthetic signal as the ofdm_param_estim variant above, but
    exercising inspector.ofdm_zkf_c.
    """
    # set up fg
    fft_len = 256
    cp_len = 32
    samp_rate = 32000
    data = np.random.choice([-1, 1], [100, fft_len])
    timefreq = np.fft.ifft(data, axis=0)
    #add cp
    timefreq = np.hstack((timefreq[:, -cp_len:], timefreq))
    tx = np.reshape(timefreq, (1, -1))
    # GR time!
    src = blocks.vector_source_c(tx[0].tolist(), True, 1, [])
    analyzer = inspector.ofdm_zkf_c(samp_rate, 0, 7000,
                                    [128, 256, 512, 1024], [8, 16, 32, 64])
    snk = blocks.message_debug()
    # connect
    self.tb.connect(src, analyzer)
    self.tb.msg_connect((analyzer, 'ofdm_out'), (snk, 'store'))
    self.tb.start()
    time.sleep(0.25)
    self.tb.stop()
    self.tb.wait()
    # check data
    result = snk.get_message(0)
    # message entries 4 and 5 carry (key, value) pairs for FFT/CP length
    fft_result = pmt.to_float(pmt.tuple_ref(pmt.tuple_ref(result, 4), 1))
    cp_result = pmt.to_float(pmt.tuple_ref(pmt.tuple_ref(result, 5), 1))
    self.assertAlmostEqual(fft_len, fft_result)
    self.assertAlmostEqual(cp_len, cp_result)
help='remote port') args = parser.parse_args() # Socket to talk to server context = zmq.Context() socket = context.socket(zmq.SUB) print 'Collecting updates from radio server at {} port {}...'.format( args.server, args.port) socket.connect('tcp://{}:{}'.format(args.server, args.port)) socket.setsockopt(zmq.SUBSCRIBE, '') data = RBDSData() try: while True: gnr_message_pmt = pmt.deserialize_str(socket.recv()) if pmt.is_tuple(gnr_message_pmt): msg_type = pmt.to_long(pmt.tuple_ref(gnr_message_pmt, 0)) msg = pmt.symbol_to_string(pmt.tuple_ref(gnr_message_pmt, 1)) data.update(msg_type, msg) print ansi_erase_display(2) + repr(data) + ansi_move_to(1, 1) else: print 'Encountered Data I Did Not Understand' except KeyboardInterrupt: print ansi_erase_display(2) + ansi_move_to( 1, 1) + "Shutdown requested...exiting" except Exception: traceback.print_exc(file=sys.stdout) sys.exit(0)
def parse_header(p, VERBOSE=False):
    """Unpack a GNU Radio file-metadata header PMT dictionary into a dict.

    (Python 2 variant -- uses print statements.)  Also sets the module
    globals `tsize` (bytes per scalar element) and `vecchk` (elements
    per item) for non-complex data.  Exits the process (status 1) on a
    missing key or a non-dict header rather than raising.
    """
    dump = pmt.PMT_NIL  # default returned by dict_ref for a missing key
    info = dict()

    if(pmt.is_dict(p) is False):
        sys.stderr.write("Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)

    # GET FILE FORMAT VERSION NUMBER
    if(pmt.dict_has_key(p, pmt.string_to_symbol("version"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("version"), dump)
        version = pmt.to_long(r)
        if(VERBOSE):
            print "Version Number: {0}".format(version)
    else:
        sys.stderr.write("Could not find key 'version': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SAMPLE RATE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_rate"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_rate"), dump)
        samp_rate = pmt.to_double(r)
        info["rx_rate"] = samp_rate
        if(VERBOSE):
            print "Sample Rate: " + eng_notation.num_to_str(samp_rate) + "SPS"
    else:
        # NOTE(review): message names 'sr' but the key checked is 'rx_rate'
        sys.stderr.write("Could not find key 'sr': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT TIME STAMP: (uint64 seconds, double fractional seconds) tuple
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_time"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_time"), dump)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
        info["rx_time"] = t
        if(VERBOSE):
            time = datetime.fromtimestamp(t).strftime('%m/%d/%Y %H:%M:%S')
            print "Timestamp (Unix Epoch): " + time
            print "Integer Seconds: " + repr(secs)
            print "Fractional Seconds: " + repr(fracs)
            #print "Linux Epoch: {0:.6f}".format(t) + " Seconds"
    else:
        sys.stderr.write("Could not find key 'time': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT ITEM SIZE (bytes per item)
    if(pmt.dict_has_key(p, pmt.string_to_symbol("size"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("size"), dump)
        dsize = pmt.to_long(r)
        info["size"] = dsize
        if(VERBOSE):
            print "Item Size: " + eng_notation.num_to_str(dsize) + " Bytes"
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT DATA TYPE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("type"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("type"), dump)
        dtype = pmt.to_long(r)
        stype = ftype_to_string[dtype]
        info["type"] = stype
        if(VERBOSE):
            print "Data Type: {0} ({1})".format(stype, dtype)
    else:
        sys.stderr.write("Could not find key 'type': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT COMPLEX
    if(pmt.dict_has_key(p, pmt.string_to_symbol("cplx"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("cplx"), dump)
        #global cplx
        cplx = pmt.to_bool(r)
        info["cplx"] = cplx
        if(VERBOSE):
            print "Complex? {0}".format(cplx)
        # module-level outputs: tsize = bytes per scalar for this dtype,
        # vecchk = elements per item (>1 means the items are vectors)
        global vecchk
        global tsize
        #print cplx
        #print dtype
        #print dsize
        if(cplx==False):
            if(dtype==0):
                tsize=1
            elif(dtype==1):
                tsize=4
            elif(dtype==2):
                tsize=4
            elif(dtype==3):
                tsize=4
            elif(dtype==5):
                tsize=4
            elif(dtype==6):
                tsize=8
            else:
                tsize=64
            #print tsize
            vecchk = dsize/tsize
            #print vecchk
            if(vecchk>1):
                print "The data is a vector containing {0} elements.".format(vecchk)
            else:
                print "The data is not a vector."
    '''else: sys.stderr.write("Could not find key 'cplx': invalid or corrupt data file.\n") sys.exit(1) '''

    # EXTRACT WHERE CURRENT SEGMENT STARTS
    if(pmt.dict_has_key(p, pmt.string_to_symbol("strt"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("strt"), dump)
        seg_start = pmt.to_uint64(r)
        info["hdr_len"] = seg_start
        info["extra_len"] = seg_start - HEADER_LENGTH
        info["has_extra"] = info["extra_len"] > 0
        if(VERBOSE):
            print "Header Length: {0} bytes".format(info["hdr_len"])
            print "Extra Length: {0}".format((info["extra_len"]))
            print "Extra Header? {0}".format(info["has_extra"])
    else:
        sys.stderr.write("Could not find key 'strt': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SIZE OF DATA
    if(pmt.dict_has_key(p, pmt.string_to_symbol("bytes"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("bytes"), dump)
        nbytes = pmt.to_uint64(r)
        nitems = nbytes/dsize
        info["nitems"] = nitems
        info["nbytes"] = nbytes
        #info["types"] = types
        if(VERBOSE):
            #print "Size of Data: {0:2.1e} bytes".format(nbytes)
            print "Segment Size (bytes): " + eng_notation.num_to_str(nbytes)
            #print " {0:2.1e} items".format(nitems)
            print "Segment Size (items): " + eng_notation.num_to_str(nitems)
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    return info
def __init__(self, fname='', add_metadata=False, metadata_format='',
             data_type='uint8', precision=0):
    """CSV writer block: writes received PDUs out to a CSV file.

    Args:
        fname: output file name
        add_metadata: if True, write metadata fields per PDU
        metadata_format: metadata header spec; '' means learn the
            fields from the first message received
        data_type: key into the data-type parser table below
        precision: decimal places for numeric output

    Raises:
        ValueError: if data_type is not one of the supported names
    """
    gr.sync_block.__init__(self,
                           name="csv_writer",
                           in_sig=None,
                           out_sig=None)
    self.fname = fname
    self.add_metadata = add_metadata
    self.metadata_format = metadata_format
    self.data_type = data_type
    self.precision = precision
    self.fid = None  # output file handle, opened lazily

    # setup logger: reuse the block-alias logger when registered,
    # otherwise fall back to the generic 'log' logger
    logger_name = 'gr_log.' + self.to_basic_block().alias()
    if logger_name in gr.logger_get_names():
        self.log = gr.logger(logger_name)
    else:
        self.log = gr.logger('log')

    # metadata field mappings: PMT -> python converters keyed by the
    # declared field type name (direct refs instead of trivial lambdas)
    self.metadata_mappings = {
        'string': pmt.symbol_to_string,
        'bool': pmt.to_bool,
        'long': pmt.to_long,
        'uint64': pmt.to_uint64,
        'float': pmt.to_float,
        'double': pmt.to_double,
        'complex': pmt.to_complex,
        # (seconds . fractional-seconds) pair
        'time': lambda x: float(pmt.to_uint64(pmt.car(x))) +
        pmt.to_double(pmt.cdr(x)),
        # (seconds, fractional-seconds) tuple
        'time_tuple': lambda x: float(pmt.to_uint64(pmt.tuple_ref(x, 0))) +
        pmt.to_double(pmt.tuple_ref(x, 1))
    }

    # data type parsers: uniform PMT vector -> python list
    self.data_type_mappings = {
        'uint8': pmt.u8vector_elements,
        'int8': pmt.s8vector_elements,
        'uint16': pmt.u16vector_elements,
        'int16': pmt.s16vector_elements,
        'uint32': pmt.u32vector_elements,
        'int32': pmt.s32vector_elements,
        'float': pmt.f32vector_elements,
        'complex float': pmt.c32vector_elements,
        'double': pmt.f64vector_elements,
        'complex double': pmt.c64vector_elements
    }

    # check data type (membership test does not need .keys())
    if data_type not in self.data_type_mappings:
        raise ValueError('Invalid data type')

    self.find_metadata = False
    self.header = []
    if self.add_metadata:
        if self.metadata_format == '':
            # set flag to load metadata on first message received
            self.find_metadata = True
        else:
            self.parse_header_format()

    # register message handler
    self.message_port_name = pmt.intern('in')
    self.message_port_register_in(self.message_port_name)
    self.set_msg_handler(self.message_port_name, self.message_handler)
def parse_header(p, VERBOSE=False):
    """Parse a GNU Radio file-metadata header PMT dict into a python dict.

    Returns a dict with keys rx_rate, rx_time, size, type, cplx,
    hdr_len, extra_len, has_extra, nitems and nbytes.  Exits the
    process (status 1) if the header is not a PMT dictionary or a
    required key is missing.
    """
    dump = pmt.PMT_NIL  # default returned by dict_ref for a missing key
    info = dict()
    if (pmt.is_dict(p) is False):
        sys.stderr.write(
            "Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)

    # GET FILE FORMAT VERSION NUMBER
    if (pmt.dict_has_key(p, pmt.string_to_symbol("version"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("version"), dump)
        version = pmt.to_long(r)
        if (VERBOSE):
            print("Version Number: {0}".format(version))
    else:
        sys.stderr.write(
            "Could not find key 'version': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SAMPLE RATE
    if (pmt.dict_has_key(p, pmt.string_to_symbol("rx_rate"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_rate"), dump)
        samp_rate = pmt.to_double(r)
        info["rx_rate"] = samp_rate
        if (VERBOSE):
            print("Sample Rate: {0:.2f} sps".format(samp_rate))
    else:
        # bug fix: the error previously named a nonexistent key 'sr'
        sys.stderr.write(
            "Could not find key 'rx_rate': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT TIME STAMP: (uint64 seconds, double fractional seconds)
    if (pmt.dict_has_key(p, pmt.string_to_symbol("rx_time"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_time"), dump)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
        info["rx_time"] = t
        if (VERBOSE):
            print("Seconds: {0:.6f}".format(t))
    else:
        # bug fix: the error previously named a nonexistent key 'time'
        sys.stderr.write(
            "Could not find key 'rx_time': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT ITEM SIZE (bytes per item)
    if (pmt.dict_has_key(p, pmt.string_to_symbol("size"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("size"), dump)
        dsize = pmt.to_long(r)
        info["size"] = dsize
        if (VERBOSE):
            print("Item size: {0}".format(dsize))
    else:
        sys.stderr.write(
            "Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT DATA TYPE
    if (pmt.dict_has_key(p, pmt.string_to_symbol("type"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("type"), dump)
        dtype = pmt.to_long(r)
        stype = ftype_to_string[dtype]
        info["type"] = stype
        if (VERBOSE):
            print("Data Type: {0} ({1})".format(stype, dtype))
    else:
        sys.stderr.write(
            "Could not find key 'type': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT COMPLEX
    if (pmt.dict_has_key(p, pmt.string_to_symbol("cplx"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("cplx"), dump)
        cplx = pmt.to_bool(r)
        info["cplx"] = cplx
        if (VERBOSE):
            print("Complex? {0}".format(cplx))
    else:
        sys.stderr.write(
            "Could not find key 'cplx': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT WHERE CURRENT SEGMENT STARTS
    if (pmt.dict_has_key(p, pmt.string_to_symbol("strt"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("strt"), dump)
        seg_start = pmt.to_uint64(r)
        info["hdr_len"] = seg_start
        info["extra_len"] = seg_start - HEADER_LENGTH
        info["has_extra"] = info["extra_len"] > 0
        if (VERBOSE):
            print("Header Length: {0} bytes".format(info["hdr_len"]))
            print("Extra Length: {0}".format((info["extra_len"])))
            print("Extra Header? {0}".format(info["has_extra"]))
    else:
        sys.stderr.write(
            "Could not find key 'strt': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SIZE OF DATA
    if (pmt.dict_has_key(p, pmt.string_to_symbol("bytes"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("bytes"), dump)
        nbytes = pmt.to_uint64(r)
        # bug fix: '/' produced a float item count under Python 3; the
        # segment size is a whole multiple of the item size
        nitems = nbytes // dsize
        info["nitems"] = nitems
        info["nbytes"] = nbytes
        if (VERBOSE):
            print("Size of Data: {0} bytes".format(nbytes))
            print("              {0} items".format(nitems))
    else:
        # bug fix: the error previously named the wrong key ('size')
        sys.stderr.write(
            "Could not find key 'bytes': invalid or corrupt data file.\n")
        sys.exit(1)

    return info
def handler(self, msg):
    """Archive the file referenced by a message's metadata.

    Accepts either a metadata dictionary or a PDU (pair of metadata
    dict and data vector).  The metadata may carry a file name, a
    (seconds, fractional-seconds) time tuple, a center frequency and a
    sample rate; those values are substituted into self.fname_format to
    build the archived file name.  Does nothing when archiving is off.
    """
    if not self.archive:
        # no need to waste any time
        return

    if not pmt.is_dict(msg):
        return

    try:
        # this will fail if message is a PDU with non-PMT_NIL arguments
        n = pmt.length(pmt.dict_items(msg))

        # a PDU with one element equal to PMT_NIL still looks like a
        # dictionary...grrrrr!
        if (n == 1) and (pmt.equal(pmt.car(msg), pmt.PMT_NIL) or
                         pmt.equal(pmt.cdr(msg), pmt.PMT_NIL)):
            # treat as a pdu
            meta = pmt.car(msg)
        else:
            # it's a dictionary
            meta = msg
    except Exception:
        # narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are not swallowed
        try:
            # message is a pdu
            pmt.length(pmt.dict_items(pmt.car(msg)))
            meta = pmt.car(msg)
        except Exception:
            return

    # extract file components
    try:
        fname = pmt.dict_ref(meta, self.filename_tag, pmt.PMT_NIL)
        file_time = pmt.dict_ref(meta, self.time_tag, pmt.PMT_NIL)
        freq = pmt.dict_ref(meta, self.freq_tag, pmt.PMT_NIL)
        rate = pmt.dict_ref(meta, self.rate_tag, pmt.PMT_NIL)

        if pmt.equal(fname, pmt.PMT_NIL):
            self.log.warn("No file specified")
            return

        f = pmt.symbol_to_string(fname)
        if self.fname_format == "":  # copy immediately
            self.copy_file(
                f, os.path.join(self.output_path, os.path.basename(f)))
        else:
            # str is immutable; the previous copy.deepcopy was a no-op
            base_fname = self.fname_format

            # add frequency information to file name
            if not pmt.equal(freq, pmt.PMT_NIL):
                freq = pmt.to_double(freq)
                for freq_spec in self.freq_specs:
                    base_fname = base_fname.replace(
                        freq_spec[0], '%0.0f' % int(freq / freq_spec[1]))

            # add sample-rate information to file name
            if not pmt.equal(rate, pmt.PMT_NIL):
                rate = pmt.to_double(rate)
                for rate_spec in self.rate_specs:
                    base_fname = base_fname.replace(
                        rate_spec[0], '%0.0f' % int(rate / rate_spec[1]))

            # time update: expand strftime directives in the name
            if not pmt.equal(file_time, pmt.PMT_NIL):
                t = pmt.to_uint64(pmt.tuple_ref(file_time, 0)) + \
                    pmt.to_double(pmt.tuple_ref(file_time, 1))
                base_fname = datetime.datetime.utcfromtimestamp(
                    t).strftime(base_fname)

            # archive file
            self.copy_file(f, os.path.join(self.output_path, base_fname))
    except Exception as e:
        self.log.error("Unable to process message:{}".format(e))
def parse_header(p, VERBOSE=False):
    """Unpack a GNU Radio file-metadata header PMT dictionary into a dict.

    (Python 2 variant -- uses print statements.)  Also sets the module
    globals `tsize` (bytes per scalar element) and `vecchk` (elements
    per item) for non-complex data.  Exits the process (status 1) on a
    missing key or a non-dict header rather than raising.
    """
    dump = pmt.PMT_NIL  # default returned by dict_ref for a missing key
    info = dict()
    if (pmt.is_dict(p) is False):
        sys.stderr.write(
            "Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)

    # GET FILE FORMAT VERSION NUMBER
    if (pmt.dict_has_key(p, pmt.string_to_symbol("version"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("version"), dump)
        version = pmt.to_long(r)
        if (VERBOSE):
            print "Version Number: {0}".format(version)
    else:
        sys.stderr.write(
            "Could not find key 'version': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SAMPLE RATE
    if (pmt.dict_has_key(p, pmt.string_to_symbol("rx_rate"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_rate"), dump)
        samp_rate = pmt.to_double(r)
        info["rx_rate"] = samp_rate
        if (VERBOSE):
            print "Sample Rate: " + eng_notation.num_to_str(samp_rate) + "SPS"
    else:
        # NOTE(review): message names 'sr' but the key checked is 'rx_rate'
        sys.stderr.write(
            "Could not find key 'sr': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT TIME STAMP: (uint64 seconds, double fractional seconds) tuple
    if (pmt.dict_has_key(p, pmt.string_to_symbol("rx_time"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_time"), dump)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
        info["rx_time"] = t
        if (VERBOSE):
            time = datetime.fromtimestamp(t).strftime('%m/%d/%Y %H:%M:%S')
            print "Timestamp (Unix Epoch): " + time
            print "Integer Seconds: " + repr(secs)
            print "Fractional Seconds: " + repr(fracs)
            #print "Linux Epoch: {0:.6f}".format(t) + " Seconds"
    else:
        sys.stderr.write(
            "Could not find key 'time': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT ITEM SIZE (bytes per item)
    if (pmt.dict_has_key(p, pmt.string_to_symbol("size"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("size"), dump)
        dsize = pmt.to_long(r)
        info["size"] = dsize
        if (VERBOSE):
            print "Item Size: " + eng_notation.num_to_str(dsize) + " Bytes"
    else:
        sys.stderr.write(
            "Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT DATA TYPE
    if (pmt.dict_has_key(p, pmt.string_to_symbol("type"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("type"), dump)
        dtype = pmt.to_long(r)
        stype = ftype_to_string[dtype]
        info["type"] = stype
        if (VERBOSE):
            print "Data Type: {0} ({1})".format(stype, dtype)
    else:
        sys.stderr.write(
            "Could not find key 'type': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT COMPLEX
    if (pmt.dict_has_key(p, pmt.string_to_symbol("cplx"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("cplx"), dump)
        #global cplx
        cplx = pmt.to_bool(r)
        info["cplx"] = cplx
        if (VERBOSE):
            print "Complex? {0}".format(cplx)
        # module-level outputs: tsize = bytes per scalar for this dtype,
        # vecchk = elements per item (>1 means the items are vectors)
        global vecchk
        global tsize
        #print cplx
        #print dtype
        #print dsize
        if (cplx == False):
            if (dtype == 0):
                tsize = 1
            elif (dtype == 1):
                tsize = 4
            elif (dtype == 2):
                tsize = 4
            elif (dtype == 3):
                tsize = 4
            elif (dtype == 5):
                tsize = 4
            elif (dtype == 6):
                tsize = 8
            else:
                tsize = 64
            #print tsize
            vecchk = dsize / tsize
            #print vecchk
            if (vecchk > 1):
                print "The data is a vector containing {0} elements.".format(
                    vecchk)
            else:
                print "The data is not a vector."
    '''else: sys.stderr.write("Could not find key 'cplx': invalid or corrupt data file.\n") sys.exit(1) '''

    # EXTRACT WHERE CURRENT SEGMENT STARTS
    if (pmt.dict_has_key(p, pmt.string_to_symbol("strt"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("strt"), dump)
        seg_start = pmt.to_uint64(r)
        info["hdr_len"] = seg_start
        info["extra_len"] = seg_start - HEADER_LENGTH
        info["has_extra"] = info["extra_len"] > 0
        if (VERBOSE):
            print "Header Length: {0} bytes".format(info["hdr_len"])
            print "Extra Length: {0}".format((info["extra_len"]))
            print "Extra Header? {0}".format(info["has_extra"])
    else:
        sys.stderr.write(
            "Could not find key 'strt': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SIZE OF DATA
    if (pmt.dict_has_key(p, pmt.string_to_symbol("bytes"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("bytes"), dump)
        nbytes = pmt.to_uint64(r)
        nitems = nbytes / dsize
        info["nitems"] = nitems
        info["nbytes"] = nbytes
        #info["types"] = types
        if (VERBOSE):
            #print "Size of Data: {0:2.1e} bytes".format(nbytes)
            print "Segment Size (bytes): " + eng_notation.num_to_str(nbytes)
            #print " {0:2.1e} items".format(nitems)
            print "Segment Size (items): " + eng_notation.num_to_str(nitems)
    else:
        sys.stderr.write(
            "Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    return info
def handle_msg(self, msg):
    """Cache RDS radio-text updates.

    Expects a PMT tuple of (numeric message type, payload symbol);
    anything else is ignored.  Type 4 carries the radio text, which is
    stored on self.radio_text.
    """
    if (pmt.is_tuple(msg)):
        # renamed from 'type' to avoid shadowing the builtin
        msg_type = pmt.to_long(pmt.tuple_ref(msg, 0))
        message = pmt.symbol_to_string(pmt.tuple_ref(msg, 1))
        if msg_type == 4:  # radio_text
            self.radio_text = message
parser = argparse.ArgumentParser() parser.add_argument("--server", "-s", default="127.0.0.1", help="remote server") parser.add_argument("--port", "-p", default=6000, type=int, help="remote port") args = parser.parse_args() # Socket to talk to server context = zmq.Context() socket = context.socket(zmq.SUB) print "Collecting updates from radio server at {} port {}...".format(args.server, args.port) socket.connect("tcp://{}:{}".format(args.server, args.port)) socket.setsockopt(zmq.SUBSCRIBE, "") data = RBDSData() try: while True: gnr_message_pmt = pmt.deserialize_str(socket.recv()) if pmt.is_tuple(gnr_message_pmt): msg_type = pmt.to_long(pmt.tuple_ref(gnr_message_pmt, 0)) msg = pmt.symbol_to_string(pmt.tuple_ref(gnr_message_pmt, 1)) data.update(msg_type, msg) print ansi_erase_display(2) + repr(data) + ansi_move_to(1, 1) else: print "Encountered Data I Did Not Understand" except KeyboardInterrupt: print ansi_erase_display(2) + ansi_move_to(1, 1) + "Shutdown requested...exiting" except Exception: traceback.print_exc(file=sys.stdout) sys.exit(0)
def handler(self, pdu):
    """Append a SigMF annotation describing a burst PDU.

    There are two basic modes: tags_to_pdu (metadata carries a
    'burst_time' tuple) or the fft burst detector (metadata carries
    'start_offset'/'end_offset').  In either case the annotation needs:
      - sob: start sample of the burst
      - eob: end sample of the burst
      - freq: center frequency of the burst in hz
      - bw: bandwidth of the burst in hz
      - b_id: unique identifier for the burst or `None`
      - snr: signal to noise ratio of annotation or `None`
    """
    if not pmt.is_pdu(pdu):
        print('input is not a PDU!, dropping')
        return  # bug fix: previously fell through and processed anyway

    meta = pmt.car(pdu)
    time_pmt = pmt.dict_ref(meta, pmt.intern('burst_time'), pmt.PMT_NIL)

    if pmt.is_tuple(time_pmt):
        # tags_to_pdu mode
        try:
            burst_time = pmt.to_double(pmt.tuple_ref(time_pmt, 1)) + \
                pmt.to_uint64(pmt.tuple_ref(time_pmt, 0))
            pdu_rate = pmt.to_double(pmt.dict_ref(
                meta, pmt.intern('sample_rate'), pmt.from_double(self.rate)))
            freq = pmt.to_double(pmt.dict_ref(
                meta, pmt.intern('center_frequency'), pmt.from_double(self.freq)))
            bw = pmt.to_double(pmt.dict_ref(
                meta, pmt.intern('bandwidth'), pmt.from_double(self.bw_min)))
            if bw < self.bw_min:
                bw = self.bw_min
            # these can be `None` so use to_python()
            snr = pmt.to_python(pmt.dict_ref(
                meta, pmt.intern('snr_db'), pmt.PMT_NIL))
            b_id = pmt.to_python(pmt.dict_ref(
                meta, pmt.intern('pdu_num'), pmt.PMT_NIL))
            # scale the PDU length into samples at the recording rate
            anno_len = int(pmt.length(pmt.cdr(pdu)) * (self.rate / pdu_rate))
            sob = int(self.rate * burst_time)
            eob = sob + anno_len
        except Exception as e:
            print('could not parse required data from message', pmt.car(pdu), ':', e)
            return
    else:
        # fft burst detector mode
        try:
            sob = pmt.to_uint64(pmt.dict_ref(
                meta, pmt.intern('start_offset'), pmt.PMT_NIL))
            eob = pmt.to_uint64(pmt.dict_ref(
                meta, pmt.intern('end_offset'), pmt.PMT_NIL))
            freq = pmt.to_double(pmt.dict_ref(
                meta, pmt.intern('center_frequency'), pmt.PMT_NIL))
            bw = pmt.to_double(pmt.dict_ref(
                meta, pmt.intern('bandwidth'), pmt.from_double(self.bw_min)))
            if bw < self.bw_min:
                bw = self.bw_min
            # these can be `None` so use to_python()
            snr = pmt.to_python(pmt.dict_ref(
                meta, pmt.intern('snr_db'), pmt.PMT_NIL))
            b_id = pmt.to_python(pmt.dict_ref(
                meta, pmt.intern('burst_id'), pmt.PMT_NIL))
        except Exception as e:
            print('could not parse required data from message', pmt.car(pdu), ':', e)
            return

    label = self.label
    if self.label == 'use_burst_id':
        label = '' if b_id is None else 'burst' + str(b_id)
    elif self.label == 'use_snr_db':
        # this probably isnt in here so it will end up blank...
        label = str(snr) + 'dB'

    # append the annotation
    try:
        annotation = {'core:sample_start': sob - self.soo,
                      'core:sample_count': eob - sob,
                      'core:freq_upper_edge': int(freq + bw / 2),
                      'core:freq_lower_edge': int(freq - bw / 2),
                      'core:description': label}
        # bug fix: snr may legitimately be None (no 'snr_db' key); the old
        # isnan(None) raised a TypeError and silently dropped the annotation
        if snr is None:
            self.d_dict['annotations'].append(annotation)
        elif isnan(snr):
            print("Got illegal SNR value in", meta)
            self.d_dict['annotations'].append(annotation)
        else:
            annotation['capture_details:SNRdB'] = snr
            self.d_dict['annotations'].append(annotation)
    except Exception as e:
        print('could not form annotation from message', pmt.car(pdu), ':', e)
def parse_header(p, VERBOSE=False):
    """Parse a GNU Radio file-metadata header PMT dict into a python dict.

    Returns a dict with keys rx_rate, rx_time, size, type, cplx,
    hdr_len, extra_len, has_extra, nitems and nbytes.  Exits the
    process (status 1) if the header is not a PMT dictionary or a
    required key is missing.
    """
    dump = pmt.PMT_NIL  # default returned by dict_ref for a missing key
    info = dict()
    if(pmt.is_dict(p) is False):
        sys.stderr.write("Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)

    # GET FILE FORMAT VERSION NUMBER
    if(pmt.dict_has_key(p, pmt.string_to_symbol("version"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("version"), dump)
        version = pmt.to_long(r)
        if(VERBOSE):
            print("Version Number: {0}".format(version))
    else:
        sys.stderr.write("Could not find key 'version': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SAMPLE RATE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_rate"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_rate"), dump)
        samp_rate = pmt.to_double(r)
        info["rx_rate"] = samp_rate
        if(VERBOSE):
            print("Sample Rate: {0:.2f} sps".format(samp_rate))
    else:
        # bug fix: the error previously named a nonexistent key 'sr'
        sys.stderr.write("Could not find key 'rx_rate': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT TIME STAMP: (uint64 seconds, double fractional seconds)
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_time"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_time"), dump)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
        info["rx_time"] = t
        if(VERBOSE):
            print("Seconds: {0:.6f}".format(t))
    else:
        # bug fix: the error previously named a nonexistent key 'time'
        sys.stderr.write("Could not find key 'rx_time': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT ITEM SIZE (bytes per item)
    if(pmt.dict_has_key(p, pmt.string_to_symbol("size"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("size"), dump)
        dsize = pmt.to_long(r)
        info["size"] = dsize
        if(VERBOSE):
            print("Item size: {0}".format(dsize))
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT DATA TYPE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("type"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("type"), dump)
        dtype = pmt.to_long(r)
        stype = ftype_to_string[dtype]
        info["type"] = stype
        if(VERBOSE):
            print("Data Type: {0} ({1})".format(stype, dtype))
    else:
        sys.stderr.write("Could not find key 'type': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT COMPLEX
    if(pmt.dict_has_key(p, pmt.string_to_symbol("cplx"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("cplx"), dump)
        cplx = pmt.to_bool(r)
        info["cplx"] = cplx
        if(VERBOSE):
            print("Complex? {0}".format(cplx))
    else:
        sys.stderr.write("Could not find key 'cplx': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT WHERE CURRENT SEGMENT STARTS
    if(pmt.dict_has_key(p, pmt.string_to_symbol("strt"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("strt"), dump)
        seg_start = pmt.to_uint64(r)
        info["hdr_len"] = seg_start
        info["extra_len"] = seg_start - HEADER_LENGTH
        info["has_extra"] = info["extra_len"] > 0
        if(VERBOSE):
            print("Header Length: {0} bytes".format(info["hdr_len"]))
            print("Extra Length: {0}".format((info["extra_len"])))
            print("Extra Header? {0}".format(info["has_extra"]))
    else:
        sys.stderr.write("Could not find key 'strt': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SIZE OF DATA
    if(pmt.dict_has_key(p, pmt.string_to_symbol("bytes"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("bytes"), dump)
        nbytes = pmt.to_uint64(r)
        # bug fix: '/' produced a float item count under Python 3; the
        # segment size is a whole multiple of the item size
        nitems = nbytes // dsize
        info["nitems"] = nitems
        info["nbytes"] = nbytes
        if(VERBOSE):
            print("Size of Data: {0} bytes".format(nbytes))
            print("              {0} items".format(nitems))
    else:
        # bug fix: the error previously named the wrong key ('size')
        sys.stderr.write("Could not find key 'bytes': invalid or corrupt data file.\n")
        sys.exit(1)

    return info