def test_003_every (self):
    """Tag every sample: with interval=1 each sample gets an rx_time_offset
    tag; verify tag count, rate/offset fields, and inter-tag time deltas."""
    self.tb = gr.top_block ()
    self.rate = 99999999999
    self.interval = 1
    self.duration = 4321
    self.src = blocks.vector_source_c(list(range(self.duration)), False, 1, [])
    self.utag = timing_utils.add_usrp_tags_c(1090e6, self.rate, 0, .98765)
    self.tags = timing_utils.tag_uhd_offset_c(self.rate, self.interval)
    self.tag_dbg = blocks.tag_debug(gr.sizeof_gr_complex*1, "", "");
    self.tag_dbg.set_display(False)
    self.tb.connect((self.src, 0), (self.utag, 0))
    self.tb.connect((self.utag, 0), (self.tags, 0))
    self.tb.connect((self.tags, 0), (self.tag_dbg, 0))
    # one tag per interval, plus 3 extra (presumably the initial USRP
    # rx_time/rx_rate/rx_freq tags -- TODO confirm against block docs)
    e_n_tags = int(ceil(1.0*self.duration / self.interval)) + 3
    self.tb.run ()
    tags = self.tag_dbg.current_tags()
    tprev = None
    for t in tags:
        if pmt.eq(t.key, pmt.intern("rx_time_offset")):
            # tuple layout: (secs, frac, offset, rate)
            self.assertAlmostEqual(self.rate, pmt.to_double(pmt.tuple_ref(t.value, 3)),-4)
            self.assertEqual(t.offset, pmt.to_uint64(pmt.tuple_ref(t.value, 2)))
            # tagged offsets must land on interval boundaries
            self.assertTrue((pmt.to_uint64(pmt.tuple_ref(t.value, 2)) / (1.0*self.interval)).is_integer())
            if tprev is not None:
                tcur = pmt.to_uint64(pmt.tuple_ref(t.value, 0)) + pmt.to_double(pmt.tuple_ref(t.value, 1))
                # consecutive tags are one interval apart in time
                self.assertAlmostEqual(tcur-tprev, 1.0*self.interval / self.rate)
                tprev = tcur
            else:
                tprev = pmt.to_uint64(pmt.tuple_ref(t.value, 0)) + pmt.to_double(pmt.tuple_ref(t.value, 1))
    self.assertEqual(self.tag_dbg.num_tags(), e_n_tags)
    self.tb = None
def test_003_double_eob_rej_tt_update (self):
    """A duplicate EOB must be rejected, and an rx_time tag mid-stream must
    update the burst-time base used for the second PDU's timestamp."""
    self.tb = gr.top_block ()
    start_time = 0.0
    sob_tag = gr.tag_utils.python_to_tag((51, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag((51+(8*11), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    # rx_time tuple: (secs=4, frac=0.125, offset=10000000, rate=4e6)
    time_tuple = pmt.make_tuple(pmt.from_uint64(4), pmt.from_double(0.125), pmt.from_uint64(10000000), pmt.from_double(4000000.0))
    time_tag = gr.tag_utils.python_to_tag((360, pmt.intern("rx_time"), time_tuple, pmt.intern("src")))
    sob_tag2 = gr.tag_utils.python_to_tag((400, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    # eob_tag2e is the extra (rejected) EOB; eob_tag2 terminates the burst
    eob_tag2e = gr.tag_utils.python_to_tag((409, pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag2 = gr.tag_utils.python_to_tag((416, pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(500), False, 1, [sob_tag, eob_tag, time_tag, sob_tag2, eob_tag2e, eob_tag2])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'), 1024, 1000000, ([]), False, 0, start_time)
    t2p.set_eob_parameters(8, 0)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    expected_vec1 = pmt.init_s16vector((8*11), range(51,51+(8*11)))
    # second burst is zero-padded out to the EOB alignment (16 samples)
    expected_vec2 = pmt.init_s16vector(16, list(range(400,409)) + [0]*7)
    expected_time1 = start_time + (51 / 1000000.0)
    # second burst time is measured from the rx_time tag at offset 360
    expected_time2 = 4.125 + ((400-360) / 1000000.0)
    self.tb.run ()
    self.assertEqual(dbg.num_messages(), 2)
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec1))
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(1)), expected_vec2))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("burst_time"), pmt.PMT_NIL)
    time_tuple2 = pmt.dict_ref(pmt.car(dbg.get_message(1)), pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple1,0)) + pmt.to_double(pmt.tuple_ref(time_tuple1,1)), expected_time1)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple2,0)) +
                           pmt.to_double(pmt.tuple_ref(time_tuple2,1)), expected_time2)
    self.tb = None
def work(self, input_items, output_items):
    """Pass samples through unchanged while detecting gaps in the stream.

    On the first rx_time tag, latch the sample rate (from the rx_rate tag)
    and the (secs, frac) time reference. On each later rx_time tag, compare
    the advance in sample offset against the advance implied by the
    timestamps; the difference is the number of dropped samples, which is
    reported to stdout.

    Fix: Python 2 ``print`` statements converted to the ``print()``
    function; "Usupported" typo corrected in the diagnostic message.
    """
    in0 = input_items[0]
    out = output_items[0]
    if self.got_fist_tag is not True:
        # NOTE(review): attribute name 'got_fist_tag' (sic) kept as-is for
        # compatibility with its initialization elsewhere in the class.
        rx_rate_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_rate"))
        rx_time_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_time"))
        if len(rx_time_tags) > 0:
            self.got_fist_tag = True
            # NOTE(review): assumes an rx_rate tag accompanies the first
            # rx_time tag; an empty rx_rate_tags list raises IndexError.
            self.rx_rate = pmt.to_double(rx_rate_tags[0].value)
            self.offset_prev = rx_time_tags[0].offset
            self.rx_time_prev_secs = pmt.to_uint64(
                pmt.tuple_ref(rx_time_tags[0].value, 0))
            self.rx_time_prev_frac = pmt.to_double(
                pmt.tuple_ref(rx_time_tags[0].value, 1))
            if len(rx_time_tags) > 1:
                print("Unsupported situation - more than one tag in a single work(..) call")
    else:
        rx_time_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_time"))
        if len(rx_time_tags) > 0:
            tt = rx_time_tags[0]
            # time advance implied by the timestamps, in samples, versus the
            # actual offset advance: the difference is the gap length
            self.rx_time_secs = pmt.to_uint64(pmt.tuple_ref(tt.value, 0))
            self.rx_time_frac = pmt.to_double(pmt.tuple_ref(tt.value, 1))
            self.offset = tt.offset
            diff_offset = self.offset - self.offset_prev
            diff_offset_real = (
                (self.rx_time_secs - self.rx_time_prev_secs) +
                (self.rx_time_frac - self.rx_time_prev_frac)) * self.rx_rate
            zeros = diff_offset_real - diff_offset
            print("Found a gap in the data at offset:", self.offset,
                  " with length:", zeros, " [samps]")
            # save previous value
            self.offset_prev = self.offset
            self.rx_time_prev_secs = self.rx_time_secs
            self.rx_time_prev_frac = self.rx_time_frac
            if len(rx_time_tags) > 1:
                print("Unsupported situation - more than one tag in a single work(..) call")
    out[:] = in0
    return len(output_items[0])
def msg_handler(self, p):
    """Append one CSV row per message to ``self.filename``.

    *p* is a PMT list of (key, value) pairs. In "all" mode a header row of
    keys is written once (guarded by ``self.counter``), then every value;
    otherwise only values whose key is in ``self.key`` are written.
    rx_time values are (secs, frac) tuples summed to a float; all other
    values are f32 vectors whose first element is written.
    """
    # NOTE(review): if self.filename == "" the writes below use a
    # previously-opened (and possibly closed) self.fdout -- confirm the
    # intended behavior with the caller.
    if self.filename != "":
        self.fdout = open(self.filename, "a")
    length = pmt.length(p)
    if self.key == "all":
        #if all keys are printed, they need however be printed once above
        if self.counter == 0:
            for i in range(0, length):
                element = pmt.nth(i, p)
                current_key = str(pmt.nth(0, element))
                self.fdout.write(current_key + ",")
            self.fdout.write("\n")
            self.counter=1
        #print all
        for i in range(0, length):
            element = pmt.nth(i, p)
            current_key = str(pmt.nth(0, element))
            current_value = pmt.nth(1, element)
            if current_key=="rx_time":
                # rx_time is a (secs, frac) tuple; store as one float
                number = pmt.to_uint64(pmt.tuple_ref(current_value,0)) + \
                    pmt.to_double(pmt.tuple_ref(current_value,1))
                self.fdout.write(str(number) + ",")
            else:
                self.fdout.write(str(pmt.f32vector_elements(current_value)[0]) + ",")
    else:
        #print all values that correspond to keys
        # NOTE(review): self.key is iterated here, so in this branch it is
        # presumably a list of key strings -- confirm against constructor.
        for key in self.key:
            for i in range(0, length):
                element = pmt.nth(i, p)
                current_key = str(pmt.nth(0, element))
                current_value = pmt.nth(1, element)
                if current_key == key:
                    if key=="rx_time":
                        number = pmt.to_uint64(pmt.tuple_ref(current_value,0)) + \
                            pmt.to_double(pmt.tuple_ref(current_value,1))
                        self.fdout.write(str(number) + ",")
                    else:
                        self.fdout.write(str(pmt.f32vector_elements(current_value)[0]) + ",")
    self.fdout.write("\n")
    self.fdout.close()
def dict_from_pdu(self, pdu):
    """Extract (burst_id, metadata dict) from a burst PDU.

    Returns ``(None, {})`` when any expected metadata field is missing or
    malformed.
    """
    meta = pmt.car(pdu)
    burst_dict = {}
    burst_id = -1
    # (dict key, pmt metadata key, converter) triples, applied in order
    fields = (
        ("start", self.pmt_start_offset, pmt.to_uint64),
        ("end", self.pmt_end_offset, pmt.to_uint64),
        ("rel_cf", self.pmt_relative_frequency, pmt.to_float),
        ("bw", self.pmt_bandwidth, pmt.to_float),
    )
    try:
        burst_id = pmt.to_uint64(
            pmt.dict_ref(meta, self.pmt_burst_id, pmt.PMT_NIL))
        for name, key, convert in fields:
            burst_dict[name] = convert(pmt.dict_ref(meta, key, pmt.PMT_NIL))
    except Exception as e:
        print(f"malformed burst (red) in the jpeg_convertor, {e}")
        return None, {}
    return burst_id, burst_dict
def test_005_two_sobs_misaligned (self):
    """Two SOB tags and an SOB-to-EOB length that is not aligned: the
    second SOB restarts the burst and the tail is zero-padded."""
    # Two SOB tags and the SOB-to-EOB length is not aligned
    self.tb = gr.top_block ()
    start_time = 0.1
    sob_tag = gr.tag_utils.python_to_tag((34, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    sob_tag2 = gr.tag_utils.python_to_tag((35, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag((34+(8*31), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(1350), False, 1, [sob_tag, sob_tag2, eob_tag])
    #vs = blocks.vector_source_s(range(350), False, 1, [sob_tag, eob_tag])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'), 1024, 512000, ([]), False, 0, start_time)
    t2p.set_eob_parameters(8, 0)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    # burst starts at the second SOB (35); one zero pads to the 8-sample
    # EOB alignment
    expected_vec = pmt.init_s16vector((8*31), list(range(35,34+(8*31))) + [0])
    expected_time = start_time + (35 / 512000.0)
    self.tb.run ()
    self.assertEqual(dbg.num_messages(), 1)
    #print "got ", dbg.get_message(0)
    #print "expected", expected_vec
    #print "len is {}".format(len(pmt.to_python(pmt.cdr(dbg.get_message(0)))))
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple1,0)) + pmt.to_double(pmt.tuple_ref(time_tuple1,1)), expected_time)
    self.tb = None
def work(self, input_items, output_items):
    """Accumulate bit errors against the reference generator.

    Whenever an rx_time tag is present in the window and bits have already
    been counted, publish a {timestamp, ber} dict on the BER port and reset
    the counters, then fold the current window into the error counts.
    """
    samples = input_items[0]
    nsamples = len(samples)
    reference = self.base.gen_n(nsamples)
    time_tags = self.get_tags_in_window(0, 0, nsamples, pmt.intern("rx_time"))
    if time_tags:
        newest = time_tags[-1]
        stamp = newest.value
        whole = pmt.to_uint64(pmt.tuple_ref(stamp, 0))
        frac = pmt.to_double(pmt.tuple_ref(stamp, 1))
        timestamp = whole + frac
        if self.nbits > 0:
            ber = self.nerrs / float(self.nbits)
            report = pmt.make_dict()
            report = pmt.dict_add(report, pmt.intern('timestamp'),
                                  pmt.from_double(timestamp))
            report = pmt.dict_add(report, pmt.intern('ber'),
                                  pmt.from_double(ber))
            self.message_port_pub(self.ber_port_id, report)
            self.nerrs = 0
            self.nbits = 0
    # count differing bits between received samples and the reference
    self.nerrs += numpy.sum(numpy.bitwise_xor(samples, reference))
    self.nbits += nsamples
    return nsamples
def parse_time_pmt(val, samples_per_second):
    """Get (sec, frac, idx) from an rx_time pmt value."""
    whole_secs = np.uint64(pmt.to_uint64(pmt.tuple_ref(val, 0)))
    frac_secs = pmt.to_double(pmt.tuple_ref(val, 1))
    # sample index of the timestamp, floored to uint64
    sample_idx = np.uint64(
        whole_secs * samples_per_second + frac_secs * samples_per_second)
    return int(whole_secs), frac_secs, int(sample_idx)
def checkmsgtime(self, msg, expected_time, expected_sample):
    """Assert that *msg* carries the expected sample index and trigger time.

    The sample index is read from ``self.skey`` and compared in seconds
    (divided by ``self.rate``); the trigger time is a (secs . frac) pair
    under ``self.tkey``.
    """
    sample = pmt.to_uint64(pmt.dict_ref(msg, self.skey, pmt.PMT_NIL))
    trig_pair = pmt.dict_ref(msg, self.tkey, pmt.PMT_NIL)
    whole = pmt.to_uint64(pmt.car(trig_pair))
    frac = pmt.to_double(pmt.cdr(trig_pair))
    trig_time = whole + frac
    self.assertAlmostEqual(
        float(sample) / self.rate,
        float(expected_sample) / self.rate,
        3,
        "Incorrect Sample from message: expected {}, received {}".format(
            expected_sample, sample))
    self.assertAlmostEqual(
        trig_time, expected_time, 3,
        "Incorrect interrupt time from message: expected {}, received {}".
        format(expected_time, trig_time))
def handle_command(self, msg):
    """Handle a tune-command dictionary message.

    Expects a PMT dict holding an LO offset under ``self.dict_key``.
    Publishes a negated-offset 'freq' command for the downstream
    freq_xlating_fir_filter_ccf block and records the sample at which the
    retune should be tagged (from an optional 'time' (secs . frac) entry,
    else TAG_IMMEDIATELY).

    Fix: Python 2 ``print`` statements converted to ``print()``, and the
    removed ``long()`` builtin replaced by ``int()``.
    """
    # incoming message will be a dictionary that should contain the items
    # freq and lo_offset at a minimum - if this is met, issue a command
    # that can be handled by the freq_xlating_fir_filter_ccf block
    try:
        # we don't care about the frequency since we are CORDIC tuning
        lo_offset = pmt.dict_ref(msg, self.dict_key, pmt.PMT_NIL)
        if not pmt.eqv(lo_offset, pmt.PMT_NIL):
            offset = pmt.to_python(lo_offset)
            self.message_port_pub(
                pmt.intern("freq"),
                pmt.cons(pmt.intern("freq"), pmt.from_double(-1.0 * offset)))
            # if the dictionary has a time value, use it
            time_tag = pmt.dict_ref(msg, pmt.intern("time"), pmt.PMT_NIL)
            if not pmt.eqv(time_tag, pmt.PMT_NIL):
                secs = pmt.to_uint64(pmt.car(time_tag)) - self.origin_time['secs']
                frac = pmt.to_double(pmt.cdr(time_tag)) - self.origin_time['frac']
                # int() replaces Python 2's long(); values are sample counts
                tune_sample = int(secs * self.sample_rate) + int(frac * self.sample_rate)
            else:
                tune_sample = TAG_IMMEDIATELY
            # we will also set the block to tag the output when it is time
            self.tag_offset = tune_sample
            self.tag_value = pmt.from_double(-1.0 * offset)
    except Exception as e:
        print("exception: " + repr(e))
def test_007_max_pdu_size_SOBs (self):
    """Two SOB tags exactly max_pdu_size samples apart: the first burst is
    cut at max_pdu_size and a second PDU is started at the next SOB."""
    # two SOB tags exactly max_pdu_size samples apart
    self.tb = gr.top_block ()
    start_time = 0.1
    max_size = 100
    sob_tag = gr.tag_utils.python_to_tag((10, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    sob_tag3 = gr.tag_utils.python_to_tag((10+max_size, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(1350), False, 1, [sob_tag, sob_tag3])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'), 1024, 512000, ([]), False, 0, start_time)
    t2p.set_eob_parameters(10, 0)
    t2p.set_max_pdu_size(max_size)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    expected_vec = pmt.init_s16vector((max_size), range(10,10+max_size))
    expected_time = start_time + (10 / 512000.0)
    self.tb.run ()
    # assertions for the first PDU only, second PDU will exist
    self.assertEqual(dbg.num_messages(), 2)
    #print "got ", dbg.get_message(0)
    #print "expected", expected_vec
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple1,0)) + pmt.to_double(pmt.tuple_ref(time_tuple1,1)), expected_time)
    self.tb = None
def work(self, input_items, output_items):
    """Pass samples through while scheduling PRS-synchronized retunes.

    Reacts to three stream tags:
    - 'start_prs': compute the wall-clock time of the tagged sample and
      schedule a timer for the next PRS boundary (0.096 s period --
      presumably the PRS repetition interval; TODO confirm), un-gating
      the block if there is enough lead time.
    - 'rx_time': remember the (offset, value) pair as the last time ref.
    - 'sync': gate the block and trigger a retune via ``self.tune_2()``.
    """
    in0 = input_items[0]
    out = output_items[0]
    n = len(in0)
    tags = self.get_tags_in_window(0, 0, n)
    for tag in tags:
        key = pmt.symbol_to_string(tag.key)
        if key == 'start_prs':
            value = pmt.to_double(tag.value)
            prs_rx_time = self.monotonic_raw_from_offset(tag.offset)
            #print "prs @", tag.offset + value, prs_rx_time
            #print "that was", monotonic() - prs_rx_time, "seconds ago"
            # next PRS boundary strictly after "now"
            next_prs = prs_rx_time + 0.096 * (int((monotonic() - prs_rx_time) / 0.096) + 1)
            #print "next prs @", next_prs
            next_prs_in = next_prs - monotonic()
            #print "next prs in", next_prs - monotonic()
            # only arm the timer when there is at least 10 ms of lead time
            if next_prs_in > 0.010:
                self.update_timer(next_prs)
                self.gated = False
        elif key == 'rx_time':
            value = pmt.to_uint64(tag.value)
            #print "sample", tag.offset, "sampled at", value
            self.last_rx_time = (tag.offset, value)
        elif key == 'sync':
            self.gated = True
            self.tune_2()
    out[:] = in0
    return len(output_items[0])
def test_001_t (self):
    """Round-robin routing: N messages with sequential ids must be
    distributed across ``self.num_paths`` debug sinks in order."""
    # set up fg
    self.tb.start()
    # Create N messages with an id field
    p = pmt.make_dict()
    num_msgs = 20
    for i in range(num_msgs):
        pc = pmt.dict_add(p,pmt.intern("id"), pmt.from_uint64(i))
        self.emitter.emit(pc)
    # Sleep for a little bit to let the messages finish propagating
    time.sleep(.05)
    self.tb.stop()
    self.tb.wait()
    msg_count = 0
    for i in range(self.num_paths):
        # path i should have received ids i, i+num_paths, i+2*num_paths, ...
        target = i
        msg_count += self.debug[i].num_messages()
        for m in range(self.debug[i].num_messages()):
            msg = self.debug[i].get_message(m)
            msg_id = pmt.to_uint64(pmt.dict_ref(msg, pmt.intern("id"), pmt.PMT_NIL))
            assert(msg_id == target and msg_id < num_msgs)
            target += self.num_paths
    assert(msg_count == num_msgs)
def test_004_boost_time(self):
    """Single SOB/EOB burst with time debug enabled: PDU data and
    burst_time metadata must match the tag positions."""
    self.tb = gr.top_block()
    start_time = 0.1
    sob_tag = gr.tag_utils.python_to_tag(
        (34, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag(
        (34 + (8 * 31), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(350), False, 1, [sob_tag, eob_tag])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'), 1024,
                                  512000, ([]), False, 0, start_time)
    t2p.enable_time_debug(True)
    t2p.set_eob_parameters(8, 0)
    dbg = blocks.message_debug()
    #td = pdu_utils.time_delta("TIME CHECKER")
    #td = timing_utils.time_delta("TIME CHECKER")
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    #self.tb.msg_connect((t2p, 'pdu_out'), (td, 'pdu_in'))
    expected_vec = pmt.init_s16vector((8 * 31), range(34, 34 + (8 * 31)))
    expected_time = start_time + (34 / 512000.0)
    ts = time.time()
    self.tb.run()
    self.assertEqual(dbg.num_messages(), 1)
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)),
                               pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(
        pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0)) +
        pmt.to_double(pmt.tuple_ref(time_tuple1, 1)), expected_time)
    #wct = pmt.to_double(pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("wall_clock_time"), pmt.PMT_NIL))
    #self.assertTrue((wct - ts) < 1.0)
    self.tb = None
def test_002_secondSOB(self):
    """A second SOB before any EOB restarts the burst: only the burst
    beginning at the second SOB (offset 51) is emitted, and its
    burst_time reflects that offset."""
    self.tb = gr.top_block()
    t_start = 4.999999999
    tag_sob1 = gr.tag_utils.python_to_tag(
        (34, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    tag_sob2 = gr.tag_utils.python_to_tag(
        (51, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    tag_eob = gr.tag_utils.python_to_tag(
        (51 + (8 * 26), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    src = blocks.vector_source_s(range(350), False, 1,
                                 [tag_sob1, tag_sob2, tag_eob])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'),
                                  1024, 460800, ([]), False, 0, t_start)
    t2p.set_eob_parameters(8, 0)
    sink = blocks.message_debug()
    self.tb.connect(src, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (sink, 'store'))
    want_vec = pmt.init_s16vector((8 * 26), range(51, 51 + (8 * 26)))
    want_time = t_start + (51 / 460800.0)
    self.tb.run()
    self.assertEqual(sink.num_messages(), 1)
    msg = sink.get_message(0)
    self.assertTrue(pmt.equal(pmt.cdr(msg), want_vec))
    burst_time = pmt.dict_ref(pmt.car(msg), pmt.intern("burst_time"),
                              pmt.PMT_NIL)
    got_time = (pmt.to_uint64(pmt.tuple_ref(burst_time, 0)) +
                pmt.to_double(pmt.tuple_ref(burst_time, 1)))
    self.assertAlmostEqual(got_time, want_time)
    self.tb = None
def test_003_update_time(self):
    """Check that the hardware time reference can be updated and that the
    reported sample accounts for the time delta."""
    # This test checks that we can update the hardware time
    self.tb.start()
    time.sleep(.01)
    t0 = .2
    self.emitter.emit(self.timemsg(t0, "pair"))
    time.sleep(.01)
    start_time = .16
    self.utag.update_tags(self.makeTimeDict(start_time))
    # poll until the message arrives (up to ~0.3 s)
    for i in range(15):
        if self.msg_dbg.num_messages() == 1:
            break
        time.sleep(0.02)
    # DO NOT call wait!!!! It won't return because the emitter block doesn't have any inputs.
    self.tb.stop()
    time.sleep(.01)
    tag = self.utag.last_tag()
    sample = pmt.to_uint64(
        pmt.dict_ref(tag, pmt.intern("rx_sample"), pmt.PMT_NIL))
    if (self.msg_dbg.num_messages() != 1):
        raise Exception("Did not send required messages")
    # expected sample = (message time - updated start time) * rate + base
    self.checkmsgtime(self.msg_dbg.get_message(0), t0,
                      (t0 - start_time) * self.rate + sample)
def test_001_t(self):
    """CW radar chain (simulator -> FFT -> CFAR -> estimator): the first
    estimator message must carry rx_time (0, 0.0) and a velocity estimate
    close to the simulated target velocity."""
    # set up fg
    test_len = 1024
    packet_len = test_len
    samp_rate = 2000
    center_freq = 1e9
    velocity = 15
    src = radar.signal_generator_cw_c(packet_len, samp_rate, (0, 0), 1)
    head = blocks.head(8, test_len)
    sim = radar.static_target_simulator_cc(
        (10, 10), (velocity, velocity), (1e9, 1e9), (0, 0), (0, ), samp_rate,
        center_freq, 1, True, False)
    mult = blocks.multiply_cc()
    fft = radar.ts_fft_cc(packet_len)
    cfar = radar.os_cfar_c(samp_rate, 5, 0, 0.78, 10, True)
    est = radar.estimator_cw(center_freq)
    res = radar.print_results()
    debug = blocks.message_debug()
    self.tb.connect(src, head, (mult, 1))
    self.tb.connect(head, sim, (mult, 0))
    self.tb.connect(mult, fft, cfar)
    self.tb.msg_connect(cfar, 'Msg out', est, 'Msg in')
    self.tb.msg_connect(est, 'Msg out', res, 'Msg in')
    self.tb.msg_connect(est, 'Msg out', debug, 'store')
    #self.tb.msg_connect(est,'Msg out',debug,'print')
    self.tb.start()
    sleep(0.5)
    self.tb.stop()
    self.tb.wait()
    # check data
    msg = debug.get_message(0)
    self.assertEqual("rx_time", pmt.symbol_to_string(pmt.nth(0, (pmt.nth(
        0, msg)))))  # check rx_time message part (symbol)
    self.assertEqual(0, pmt.to_uint64(
        pmt.tuple_ref(pmt.nth(1, (pmt.nth(0, msg))), 0)))  # check rx_time value
    self.assertEqual(
        0.0, pmt.to_double(pmt.tuple_ref(pmt.nth(1, (pmt.nth(0, msg))), 1)))
    self.assertEqual(
        "velocity", pmt.symbol_to_string(pmt.nth(
            0, (pmt.nth(1, msg)))))  # check velocity message part (symbol)
    # ratio close to 1 => estimated velocity close to simulated velocity
    self.assertAlmostEqual(
        1, velocity / pmt.f32vector_ref(pmt.nth(1, (pmt.nth(1, msg))), 0),
        2)  # check velocity value
def work(self, input_items, output_items):
    """Copy input to output, maintain the stream time reference from
    time-key tags, and tag queued tune commands at the right sample.

    A queued command is tagged immediately when it is TAG_IMMEDIATELY or
    its offset has already elapsed, tagged in place when it falls inside
    the current window, and left queued otherwise.

    Fix: Python 2 ``print`` statements converted to ``print()``.
    """
    with self.lock:
        # print("nitems_read = {}".format(self.nitems_read(0)))
        in0 = input_items[0]
        out = output_items[0]
        noutput_items = len(input_items[0])
        nitems_read = self.nitems_read(0)
        out[:] = in0

        # look for time reference in tags
        tags = self.get_tags_in_window(0, 0, noutput_items, self.time_key)
        if len(tags):
            # use last tag in window to update reference time
            try:
                offset = tags[-1].offset
                sec = pmt.to_uint64(pmt.tuple_ref(tags[-1].value, 0))
                frac = pmt.to_double(pmt.tuple_ref(tags[-1].value, 1))
                self.set_ref_time(offset, sec, frac)
            except Exception as e:
                print("invalid tag value: ", repr(e))

        # if there is a tune that needs to be tagged
        while len(self.tune_commands):
            (tag_offset, tag_value) = self.tune_commands[0]
            tag = False
            if tag_offset is TAG_IMMEDIATELY:
                offset = nitems_read
                tag = True
            elif tag_offset < nitems_read:
                # time has already elapsed - tag immediately
                offset = nitems_read
                tag = True
            elif nitems_read <= tag_offset < (nitems_read + noutput_items):
                # time within current window
                offset = tag_offset
                tag = True
            # tag it
            if tag:
                self.add_item_tag(0, offset, self.tag_key, tag_value)
                self.tune_commands.popleft()
            else:
                # command is for a future window - stop processing the queue
                break
        return noutput_items
def handler(self, msg):
    """Append a SigMF annotation built from a burst PDU's metadata.

    Reads start/end offsets and center frequency (required), plus optional
    burst_id and bandwidth (from 'symbol_rate' or, preferentially,
    'bandwidth', floored at ``self.bw_min``). Any failure is reported and
    the annotation is skipped.
    """
    try:
        meta = pmt.car(msg)
        sob = pmt.to_uint64(pmt.dict_ref(meta, pmt.intern('start_offset'), pmt.PMT_NIL))
        eob = pmt.to_uint64(pmt.dict_ref(meta, pmt.intern('end_offset'), pmt.PMT_NIL))
        freq = pmt.to_double(pmt.dict_ref(meta, pmt.intern('center_frequency'), pmt.PMT_NIL))
        # optional fields: best-effort extraction, ignore failures
        try:
            burst = 'burst'+str(pmt.to_uint64(pmt.dict_ref(meta, pmt.intern('burst_id'), pmt.PMT_NIL)))
        except:
            burst = ''
        try:
            bw = pmt.to_double(pmt.dict_ref(meta, pmt.intern('symbol_rate'), pmt.PMT_NIL))
        except:
            pass
        try:
            # 'bandwidth' overrides 'symbol_rate' when both are present
            bw = pmt.to_double(pmt.dict_ref(meta, pmt.intern('bandwidth'), pmt.PMT_NIL))
            if bw < self.bw_min:
                bw = self.bw_min
        except:
            pass
        # NOTE(review): if neither symbol_rate nor bandwidth is present,
        # 'bw' below is unbound (NameError) and is caught by the outer
        # except -- confirm this fallback is intended.
        self.d_dict['annotations'].append({'core:sample_start': sob-self.soo,
                                           'core:sample_count': eob-sob,
                                           'core:freq_upper_edge': int(freq+bw/2),
                                           'core:freq_lower_edge': int(freq-bw/2),
                                           'core:description': burst})
    except Exception as e:
        print('could not form annotation: ',e)
def parse_extra_dict(p, info, VERBOSE=False):
    """Merge the extra-header PMT dictionary *p* into the dict *info*.

    Exits the program when *p* is not a PMT dictionary.

    Fix: Python 2 ``xrange`` replaced with ``range``.

    NOTE(review): the return value converts the *last* visited entry with
    pmt.to_uint64 (and raises NameError for an empty dict) -- confirm this
    is what callers rely on.
    """
    if (pmt.is_dict(p) is False):
        sys.stderr.write(
            "Extra header is not a PMT dictionary: invalid or corrupt data file.\n"
        )
        sys.exit(1)
    items = pmt.dict_items(p)
    nitems = pmt.length(items)
    for i in range(nitems):
        item = pmt.nth(i, items)
        key = pmt.symbol_to_string(pmt.car(item))
        val = pmt.cdr(item)
        info[key] = val
        if (VERBOSE):
            #print "{0}: {1}".format(key, val)
            pass
    return pmt.to_uint64(val)
def handle_msg(self, msg):
    """Record the timestamp and phase carried by a measurement message.

    The message is a PMT list of (key, value) pairs ordered
    rx_time, frequency, power, phase; the first entry's value is a
    (secs, frac) tuple and the last entry's value is an f32 vector.
    """
    n_entries = pmt.length(msg)
    # rx_time, frequency, power, phase
    stamp_tuple = pmt.nth(1, pmt.nth(0, msg))
    whole = pmt.to_uint64(pmt.tuple_ref(stamp_tuple, 0))
    frac = pmt.to_double(pmt.tuple_ref(stamp_tuple, 1))
    phase_vec = pmt.nth(1, pmt.nth(n_entries - 1, msg))
    #plt.scatter(timestamp, phase_val)
    #plt.pause(0.05)
    #power_val_vec = pmt.nth(1, pmt.nth(d_size_msg-2, msg))
    #power_val = pmt.f32vector_elements(power_val_vec)[0]
    self.phase = pmt.f32vector_elements(phase_vec)[0]
    self.time = whole + frac
def test_006_max_pdu_size(self):
    """Two SOBs max_pdu_size apart with an EOB length not divisible by the
    alignment: first PDU is zero-padded to the alignment boundary."""
    # two SOB tags exactly max_pdu_size samples apart, with an SOB-to-EOB length that is not divisible by the alignment size
    self.tb = gr.top_block()
    start_time = 0.1
    max_size = 100
    sob_tag = gr.tag_utils.python_to_tag(
        (10, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag(
        (91, pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    sob_tag3 = gr.tag_utils.python_to_tag(
        (11 + max_size, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(1350), False, 1,
                                [sob_tag, eob_tag, sob_tag3])
    t2p = pdu.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'), 1024,
                            512000, ([]), False, 0, start_time)
    t2p.set_eob_parameters(10, 0)
    t2p.set_max_pdu_size(max_size)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdus'), (dbg, 'store'))
    # 81 samples (10..90) padded with 9 zeros to the 10-sample alignment
    expected_vec = pmt.init_s16vector((9 * 10), list(range(10, 91)) + [0] * 9)
    expected_time = start_time + (10 / 512000.0)
    self.tb.run()
    # assertions for the first PDU only, second PDU will exist
    self.assertEqual(dbg.num_messages(), 2)
    #print "got ", dbg.get_message(0)
    #print "expected", expected_vec
    #print "len is {}".format(len(pmt.to_python(pmt.cdr(dbg.get_message(0)))))
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)),
                               pmt.intern("rx_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(
        pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0)) +
        pmt.to_double(pmt.tuple_ref(time_tuple1, 1)), expected_time)
    self.tb = None
def test21_absolute_serialization_nums(self):
    """Round-trip numeric PMT SERDES against known byte strings: uint64,
    long, float, and double serialize/deserialize to fixed encodings."""
    # uint64 SERDES
    in_num = 9999876
    in_str = b'\x0b\x00\x00\x00\x00\x00\x98\x96\x04'
    out_str = pmt.serialize_str(pmt.from_uint64(in_num))
    self.assertEqual(out_str, in_str)
    in_str = b'\x0b\xff\xff\xff\xff\xff\xff\xff\xff'
    in_num = 0xffffffffffffffff
    out_num = pmt.to_uint64(pmt.deserialize_str(in_str))
    self.assertEqual(out_num, in_num)
    # long int SERDES
    in_num = 2432141
    in_str = b'\x03\x00%\x1c\x8d'
    out_str = pmt.serialize_str(pmt.from_long(in_num))
    self.assertEqual(out_str, in_str)
    in_str = b'\x03\xfdy\xe4\xb7'
    in_num = -42343241
    out_num = pmt.to_long(pmt.deserialize_str(in_str))
    self.assertEqual(out_num, in_num)
    # float SERDES
    in_num = -1.11
    in_str = b'\x04\xbf\xf1\xc2\x8f`\x00\x00\x00'
    out_str = pmt.serialize_str(pmt.from_float(in_num))
    self.assertEqual(out_str, in_str)
    in_str = b'\x04@\x8e\xdd;`\x00\x00\x00'
    in_num = 987.6539916992188
    out_num = pmt.to_float(pmt.deserialize_str(in_str))
    self.assertEqual(out_num, in_num)
    # double SERDES
    in_num = 987654.321
    in_str = b'\x04A.$\x0c\xa4Z\x1c\xac'
    out_str = pmt.serialize_str(pmt.from_double(in_num))
    self.assertEqual(out_str, in_str)
    in_str = b'\x04\xbf\xdb\x19\x84@A\r\xbc'
    in_num = -.42343241
    out_num = pmt.to_double(pmt.deserialize_str(in_str))
    self.assertEqual(out_num, in_num)
def test_001_t (self):
    """CW radar chain (simulator -> FFT -> CFAR -> estimator): the first
    estimator message must carry rx_time (0, 0.0) and a velocity estimate
    close to the simulated target velocity."""
    # set up fg
    test_len = 1024
    packet_len = test_len
    samp_rate = 2000
    center_freq = 1e9
    velocity = 15
    src = radar.signal_generator_cw_c(packet_len,samp_rate,(0,0),1)
    head = blocks.head(8,test_len)
    sim = radar.static_target_simulator_cc((10,10),(velocity,velocity),(1e9,1e9),(0,0),(0,),samp_rate,center_freq,1,True,False)
    mult = blocks.multiply_cc()
    fft = radar.ts_fft_cc(packet_len)
    cfar = radar.os_cfar_c(samp_rate, 5, 0, 0.78, 10, True)
    est = radar.estimator_cw(center_freq)
    res = radar.print_results()
    debug = blocks.message_debug()
    self.tb.connect(src,head,(mult,1))
    self.tb.connect(head,sim,(mult,0))
    self.tb.connect(mult,fft,cfar)
    self.tb.msg_connect(cfar,'Msg out',est,'Msg in')
    self.tb.msg_connect(est,'Msg out',res,'Msg in')
    self.tb.msg_connect(est,'Msg out',debug,'store')
    #self.tb.msg_connect(est,'Msg out',debug,'print')
    self.tb.start()
    sleep(0.5)
    self.tb.stop()
    self.tb.wait()
    # check data
    msg = debug.get_message(0)
    self.assertEqual( "rx_time", pmt.symbol_to_string(pmt.nth(0,(pmt.nth(0,msg)))) ) # check rx_time message part (symbol)
    self.assertEqual( 0, pmt.to_uint64(pmt.tuple_ref(pmt.nth(1,(pmt.nth(0,msg))),0)) ) # check rx_time value
    self.assertEqual( 0.0, pmt.to_double(pmt.tuple_ref(pmt.nth(1,(pmt.nth(0,msg))),1)) )
    self.assertEqual( "velocity", pmt.symbol_to_string(pmt.nth(0,(pmt.nth(1,msg)))) ) # check velocity message part (symbol)
    # ratio close to 1 => estimated velocity close to simulated velocity
    self.assertAlmostEqual( 1, velocity/pmt.f32vector_ref(pmt.nth(1,(pmt.nth(1,msg))),0), 2 ) # check velocity value
def handle_command(self, msg):
    """Handle a tune-command dict message (thread-safe variant).

    Reads the LO offset under ``self.dict_key``, publishes a negated
    'freq' command, computes the retune sample from an optional 'time'
    (secs . frac) entry relative to ``self.ref_time`` (else
    TAG_IMMEDIATELY), and queues (sample, value) for tagging in work().
    """
    with self.lock:
        # incoming message will be a dictionary that should contain the items
        # freq and lo_offset at a minimum - if this is met, issue a command
        # that can be handled by the freq_xlating_fir_filter_ccf block
        try:
            # we don't care about the frequency since we are CORDIC tuning
            lo_offset = pmt.dict_ref(msg, self.dict_key, pmt.PMT_NIL)
            if not pmt.eqv(lo_offset, pmt.PMT_NIL):
                offset = pmt.to_python(lo_offset)
                self.message_port_pub(
                    timing_utils.PMTCONSTSTR__freq(),
                    pmt.cons(timing_utils.PMTCONSTSTR__freq(),
                             pmt.from_double(-1.0 * offset)))
                # if the dictionary has a time value, use it
                time_tag = pmt.dict_ref(msg, timing_utils.PMTCONSTSTR__time(),
                                        pmt.PMT_NIL)
                if not pmt.eqv(time_tag, pmt.PMT_NIL):
                    secs = pmt.to_uint64(
                        pmt.car(time_tag)) - self.ref_time['secs']
                    frac = pmt.to_double(
                        pmt.cdr(time_tag)) - self.ref_time['frac']
                    tune_sample = int(secs * self.sample_rate) + int(
                        frac * self.sample_rate) + self.ref_time['offset']
                else:
                    tune_sample = TAG_IMMEDIATELY
                # we will also set the block to tag the output when it is time
                # (drop the command silently if the queue is full)
                if len(self.tune_commands) < self.tune_commands.maxlen:
                    self.tune_commands.append(
                        (tune_sample, pmt.from_double(-1.0 * offset)))
        except Exception as e:
            print("exception: " + repr(e))
def update_timestamp(hdr, seg_size):
    """Return a copy of *hdr* whose rx_time is advanced by *seg_size* samples.

    Reads the (secs, frac) rx_time tuple and the rx_rate from the header
    dict, adds ``seg_size / rate`` seconds, and re-inserts the updated
    rx_time tuple. Exits the program if rx_time is missing.

    Fix: the removed Python 2 ``long()`` builtin replaced by ``int()``.
    """
    if pmt.dict_has_key(hdr, pmt.string_to_symbol("rx_time")):
        r = pmt.dict_ref(hdr, pmt.string_to_symbol("rx_time"), pmt.PMT_NIL)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
    else:
        sys.stderr.write("Could not find key 'time': \
invalid or corrupt data file.\n")
        sys.exit(1)
    new_hdr = pmt.dict_delete(hdr, pmt.intern("rx_time"))
    if pmt.dict_has_key(hdr, pmt.intern("rx_rate")):
        r = pmt.dict_ref(hdr, pmt.intern("rx_rate"), pmt.PMT_NIL)
        rate = pmt.to_double(r)
        new_t = t + float(seg_size) / rate
        # int() replaces Python 2's long(): whole seconds of the new time
        new_secs = int(new_t)
        new_fracs = new_t - new_secs
        time_val = pmt.make_tuple(pmt.from_uint64(new_secs),
                                  pmt.from_double(new_fracs))
        new_hdr = pmt.dict_add(new_hdr, pmt.intern("rx_time"), time_val)
    return new_hdr
def update_timestamp(hdr,seg_size):
    """Advance the rx_time entry of header dict *hdr* by *seg_size* samples.

    The new time is ``secs + fracs + seg_size / rx_rate``; it is split back
    into a (whole seconds, fractional seconds) tuple and stored under
    "rx_time". Exits the program if rx_time is missing.

    Fix: the removed Python 2 ``long()`` builtin replaced by ``int()``.
    """
    if pmt.dict_has_key(hdr, pmt.string_to_symbol("rx_time")):
        r = pmt.dict_ref(hdr, pmt.string_to_symbol("rx_time"), pmt.PMT_NIL)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
    else:
        sys.stderr.write("Could not find key 'time': \
invalid or corrupt data file.\n")
        sys.exit(1)
    new_hdr = pmt.dict_delete(hdr, pmt.intern("rx_time"))
    if pmt.dict_has_key(hdr, pmt.intern("rx_rate")):
        r = pmt.dict_ref(hdr, pmt.intern("rx_rate"), pmt.PMT_NIL)
        rate = pmt.to_double(r)
        new_t = t + float(seg_size)/rate
        # int() replaces Python 2's long(): whole seconds of the new time
        new_secs = int(new_t)
        new_fracs = new_t - new_secs
        time_val = pmt.make_tuple(pmt.from_uint64(new_secs),
                                  pmt.from_double(new_fracs))
        new_hdr = pmt.dict_add(new_hdr, pmt.intern("rx_time"), time_val)
    return new_hdr
def fft_msg_handler(self, pdu):
    """Accumulate FFT-row PDUs into an image; arm the publish timer when
    ``self.nrows`` rows have been collected.

    Each PDU carries an f32 vector of length ``self.fft_size`` plus
    metadata; mismatched lengths reset the image accumulator.
    """
    if not self.need_to_publish or self.fft_ready:
        # drop messages if we're not currently building an image
        return
    # if we are building an image, read the pdu f32 fft data
    try:
        meta = pmt.car(pdu)
        data = pmt.f32vector_elements(pmt.cdr(pdu))
    except Exception as e:
        # should we reset the image data here?
        print(f"exception in burst_tag_debug, {e}")
        return
    # check that this vector is the same size as the rest
    if not len(data) == self.fft_size:
        print(f"different length vector received in burst_tag_debug block {len(data)}, resetting")
        self.image_data = []
        self.row_idx = 0
        return
    # save off the starting sample number of this image
    if self.starting_fft_sample is None:
        self.starting_fft_sample = pmt.to_uint64(pmt.dict_ref(meta, self.pmt_start_offset, pmt.PMT_NIL))
    # looks good, add on the data to the image
    self.image_data.append(data)
    self.row_idx += 1
    # publish an event followed by an image once it is complete
    if self.row_idx >= self.nrows:
        self.fft_ready = True
        # wait N more seconds for additional burst metadata to make it in
        N = 1.0
        self.publish_timer = threading.Timer(N, self.publish_result)
        self.publish_timer.start()
def nproduced(self, produced_pmt):
    """Cache the latest produced-sample count from a uint64 PMT message."""
    self.nproduced_val = pmt.to_uint64(produced_pmt)
def parse_header(p, VERBOSE=False):
    """Parse a GNU Radio file-metadata header PMT dictionary into a plain dict.

    Parameters:
        p: PMT dictionary holding the header fields.
        VERBOSE: when True, print each extracted field to stdout.

    Returns a dict with keys: rx_rate, rx_time, size, type, cplx,
    hdr_len, extra_len, has_extra, nitems, nbytes.

    Exits the process (via sys.exit) if the header is not a PMT dict or
    any required key is missing.
    """
    dump = pmt.PMT_NIL
    info = dict()
    if pmt.is_dict(p) is False:
        sys.stderr.write(
            "Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)

    def _require(key, err_name=None):
        # Fetch a mandatory header field; exit with the historical error
        # message (some messages name a different key than the one looked up,
        # preserved for compatibility with existing log scraping).
        if pmt.dict_has_key(p, pmt.string_to_symbol(key)):
            return pmt.dict_ref(p, pmt.string_to_symbol(key), dump)
        sys.stderr.write(
            "Could not find key '{0}': invalid or corrupt data file.\n".format(
                err_name if err_name is not None else key))
        sys.exit(1)

    # GET FILE FORMAT VERSION NUMBER
    version = pmt.to_long(_require("version"))
    if VERBOSE:
        print("Version Number: {0}".format(version))

    # EXTRACT SAMPLE RATE
    samp_rate = pmt.to_double(_require("rx_rate", "sr"))
    info["rx_rate"] = samp_rate
    if VERBOSE:
        print("Sample Rate: {0:.2f} sps".format(samp_rate))

    # EXTRACT TIME STAMP (tuple of uint64 integer seconds, double fractional)
    r = _require("rx_time", "time")
    secs = float(pmt.to_uint64(pmt.tuple_ref(r, 0)))
    fracs = pmt.to_double(pmt.tuple_ref(r, 1))
    t = secs + fracs
    info["rx_time"] = t
    if VERBOSE:
        print("Seconds: {0:.6f}".format(t))

    # EXTRACT ITEM SIZE
    dsize = pmt.to_long(_require("size"))
    info["size"] = dsize
    if VERBOSE:
        print("Item size: {0}".format(dsize))

    # EXTRACT DATA TYPE
    dtype = pmt.to_long(_require("type"))
    stype = ftype_to_string[dtype]
    info["type"] = stype
    if VERBOSE:
        print("Data Type: {0} ({1})".format(stype, dtype))

    # EXTRACT COMPLEX
    cplx = pmt.to_bool(_require("cplx"))
    info["cplx"] = cplx
    if VERBOSE:
        print("Complex? {0}".format(cplx))

    # EXTRACT WHERE CURRENT SEGMENT STARTS
    seg_start = pmt.to_uint64(_require("strt"))
    info["hdr_len"] = seg_start
    info["extra_len"] = seg_start - HEADER_LENGTH
    info["has_extra"] = info["extra_len"] > 0
    if VERBOSE:
        print("Header Length: {0} bytes".format(info["hdr_len"]))
        print("Extra Length: {0}".format((info["extra_len"])))
        print("Extra Header? {0}".format(info["has_extra"]))

    # EXTRACT SIZE OF DATA
    nbytes = pmt.to_uint64(_require("bytes", "size"))
    # BUGFIX: '/' is true division in Python 3 and would make the item
    # count a float; the segment byte count is a whole number of items,
    # so floor division yields the intended integer.
    nitems = nbytes // dsize
    info["nitems"] = nitems
    info["nbytes"] = nbytes
    if VERBOSE:
        print("Size of Data: {0} bytes".format(nbytes))
        print("              {0} items".format(nitems))

    return info
def parse_header(p, VERBOSE=False):
    """Parse a file-metadata header PMT dictionary into a plain dict (Python 2).

    Returns a dict with keys rx_rate, rx_time, size, type, cplx, hdr_len,
    extra_len, has_extra, nitems and nbytes; exits the process on a missing
    key.  Also sets module globals `vecchk` and `tsize` as a side effect
    when the data is non-complex.
    """
    dump = pmt.PMT_NIL
    info = dict()
    if (pmt.is_dict(p) is False):
        sys.stderr.write(
            "Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)
    # GET FILE FORMAT VERSION NUMBER
    if (pmt.dict_has_key(p, pmt.string_to_symbol("version"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("version"), dump)
        version = pmt.to_long(r)
        if (VERBOSE):
            print "Version Number: {0}".format(version)
    else:
        sys.stderr.write(
            "Could not find key 'version': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT SAMPLE RATE
    if (pmt.dict_has_key(p, pmt.string_to_symbol("rx_rate"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_rate"), dump)
        samp_rate = pmt.to_double(r)
        info["rx_rate"] = samp_rate
        if (VERBOSE):
            print "Sample Rate: " + eng_notation.num_to_str(samp_rate) + "SPS"
    else:
        sys.stderr.write(
            "Could not find key 'sr': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT TIME STAMP (tuple of uint64 integer seconds, double fractional)
    if (pmt.dict_has_key(p, pmt.string_to_symbol("rx_time"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_time"), dump)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
        info["rx_time"] = t
        if (VERBOSE):
            # NOTE: this local 'time' shadows any imported 'time' module here
            time = datetime.fromtimestamp(t).strftime('%m/%d/%Y %H:%M:%S')
            print "Timestamp (Unix Epoch): " + time
            print "Integer Seconds: " + repr(secs)
            print "Fractional Seconds: " + repr(fracs)
            #print "Linux Epoch: {0:.6f}".format(t) + " Seconds"
    else:
        sys.stderr.write(
            "Could not find key 'time': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT ITEM SIZE
    if (pmt.dict_has_key(p, pmt.string_to_symbol("size"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("size"), dump)
        dsize = pmt.to_long(r)
        info["size"] = dsize
        if (VERBOSE):
            print "Item Size: " + eng_notation.num_to_str(dsize) + " Bytes"
    else:
        sys.stderr.write(
            "Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT DATA TYPE
    if (pmt.dict_has_key(p, pmt.string_to_symbol("type"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("type"), dump)
        dtype = pmt.to_long(r)
        stype = ftype_to_string[dtype]
        info["type"] = stype
        if (VERBOSE):
            print "Data Type: {0} ({1})".format(stype, dtype)
    else:
        sys.stderr.write(
            "Could not find key 'type': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT COMPLEX
    if (pmt.dict_has_key(p, pmt.string_to_symbol("cplx"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("cplx"), dump)
        #global cplx
        cplx = pmt.to_bool(r)
        info["cplx"] = cplx
        if (VERBOSE):
            print "Complex? {0}".format(cplx)
        # side effect: per-type element size and vector-length check are
        # published through module globals for use elsewhere in the script
        global vecchk
        global tsize
        #print cplx
        #print dtype
        #print dsize
        if (cplx == False):
            if (dtype == 0):
                tsize = 1
            elif (dtype == 1):
                tsize = 4
            elif (dtype == 2):
                tsize = 4
            elif (dtype == 3):
                tsize = 4
            elif (dtype == 5):
                tsize = 4
            elif (dtype == 6):
                tsize = 8
            else:
                tsize = 64
            #print tsize
            vecchk = dsize / tsize
            #print vecchk
            if (vecchk > 1):
                print "The data is a vector containing {0} elements.".format(
                    vecchk)
            else:
                print "The data is not a vector."
    '''else:
        sys.stderr.write("Could not find key 'cplx': invalid or corrupt data file.\n")
        sys.exit(1)
    '''
    # EXTRACT WHERE CURRENT SEGMENT STARTS
    if (pmt.dict_has_key(p, pmt.string_to_symbol("strt"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("strt"), dump)
        seg_start = pmt.to_uint64(r)
        info["hdr_len"] = seg_start
        info["extra_len"] = seg_start - HEADER_LENGTH
        info["has_extra"] = info["extra_len"] > 0
        if (VERBOSE):
            print "Header Length: {0} bytes".format(info["hdr_len"])
            print "Extra Length: {0}".format((info["extra_len"]))
            print "Extra Header? {0}".format(info["has_extra"])
    else:
        sys.stderr.write(
            "Could not find key 'strt': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT SIZE OF DATA
    if (pmt.dict_has_key(p, pmt.string_to_symbol("bytes"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("bytes"), dump)
        nbytes = pmt.to_uint64(r)
        nitems = nbytes / dsize
        info["nitems"] = nitems
        info["nbytes"] = nbytes
        #info["types"] = types
        if (VERBOSE):
            #print "Size of Data: {0:2.1e} bytes".format(nbytes)
            print "Segment Size (bytes): " + eng_notation.num_to_str(nbytes)
            #print "           {0:2.1e} items".format(nitems)
            print "Segment Size (items): " + eng_notation.num_to_str(nitems)
    else:
        sys.stderr.write(
            "Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)
    return info
def nproduced(self, produced_pmt):
    """Message handler: advance the produced-sample high-water mark.

    Adds self.feedback_delay to the reported count and keeps
    self.nproduced_val monotonically non-decreasing.
    """
    delayed = pmt.to_uint64(produced_pmt) + self.feedback_delay
    if delayed > self.nproduced_val:
        self.nproduced_val = delayed
def parse_header(p, VERBOSE=False):
    """Parse a file-metadata header PMT dictionary into a plain dict (Python 2).

    Returns a dict with keys rx_rate, rx_time, size, type, cplx, hdr_len,
    extra_len, has_extra, nitems and nbytes; exits the process on a missing
    key.  Also sets module globals `vecchk` and `tsize` as a side effect
    when the data is non-complex.
    """
    dump = pmt.PMT_NIL
    info = dict()
    if(pmt.is_dict(p) is False):
        sys.stderr.write("Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)
    # GET FILE FORMAT VERSION NUMBER
    if(pmt.dict_has_key(p, pmt.string_to_symbol("version"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("version"), dump)
        version = pmt.to_long(r)
        if(VERBOSE):
            print "Version Number: {0}".format(version)
    else:
        sys.stderr.write("Could not find key 'version': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT SAMPLE RATE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_rate"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_rate"), dump)
        samp_rate = pmt.to_double(r)
        info["rx_rate"] = samp_rate
        if(VERBOSE):
            print "Sample Rate: " + eng_notation.num_to_str(samp_rate) + "SPS"
    else:
        sys.stderr.write("Could not find key 'sr': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT TIME STAMP (tuple of uint64 integer seconds, double fractional)
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_time"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_time"), dump)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
        info["rx_time"] = t
        if(VERBOSE):
            # NOTE: this local 'time' shadows any imported 'time' module here
            time = datetime.fromtimestamp(t).strftime('%m/%d/%Y %H:%M:%S')
            print "Timestamp (Unix Epoch): " + time
            print "Integer Seconds: " + repr(secs)
            print "Fractional Seconds: " + repr(fracs)
            #print "Linux Epoch: {0:.6f}".format(t) + " Seconds"
    else:
        sys.stderr.write("Could not find key 'time': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT ITEM SIZE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("size"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("size"), dump)
        dsize = pmt.to_long(r)
        info["size"] = dsize
        if(VERBOSE):
            print "Item Size: " + eng_notation.num_to_str(dsize) + " Bytes"
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT DATA TYPE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("type"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("type"), dump)
        dtype = pmt.to_long(r)
        stype = ftype_to_string[dtype]
        info["type"] = stype
        if(VERBOSE):
            print "Data Type: {0} ({1})".format(stype, dtype)
    else:
        sys.stderr.write("Could not find key 'type': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT COMPLEX
    if(pmt.dict_has_key(p, pmt.string_to_symbol("cplx"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("cplx"), dump)
        #global cplx
        cplx = pmt.to_bool(r)
        info["cplx"] = cplx
        if(VERBOSE):
            print "Complex? {0}".format(cplx)
        # side effect: per-type element size and vector-length check are
        # published through module globals for use elsewhere in the script
        global vecchk
        global tsize
        #print cplx
        #print dtype
        #print dsize
        if(cplx==False):
            if(dtype==0):
                tsize=1
            elif(dtype==1):
                tsize=4
            elif(dtype==2):
                tsize=4
            elif(dtype==3):
                tsize=4
            elif(dtype==5):
                tsize=4
            elif(dtype==6):
                tsize=8
            else:
                tsize=64
            #print tsize
            vecchk = dsize/tsize
            #print vecchk
            if(vecchk>1):
                print "The data is a vector containing {0} elements.".format(vecchk)
            else:
                print "The data is not a vector."
    '''else:
        sys.stderr.write("Could not find key 'cplx': invalid or corrupt data file.\n")
        sys.exit(1)
    '''
    # EXTRACT WHERE CURRENT SEGMENT STARTS
    if(pmt.dict_has_key(p, pmt.string_to_symbol("strt"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("strt"), dump)
        seg_start = pmt.to_uint64(r)
        info["hdr_len"] = seg_start
        info["extra_len"] = seg_start - HEADER_LENGTH
        info["has_extra"] = info["extra_len"] > 0
        if(VERBOSE):
            print "Header Length: {0} bytes".format(info["hdr_len"])
            print "Extra Length: {0}".format((info["extra_len"]))
            print "Extra Header? {0}".format(info["has_extra"])
    else:
        sys.stderr.write("Could not find key 'strt': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT SIZE OF DATA
    if(pmt.dict_has_key(p, pmt.string_to_symbol("bytes"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("bytes"), dump)
        nbytes = pmt.to_uint64(r)
        nitems = nbytes/dsize
        info["nitems"] = nitems
        info["nbytes"] = nbytes
        #info["types"] = types
        if(VERBOSE):
            #print "Size of Data: {0:2.1e} bytes".format(nbytes)
            print "Segment Size (bytes): " + eng_notation.num_to_str(nbytes)
            #print "           {0:2.1e} items".format(nitems)
            print "Segment Size (items): " + eng_notation.num_to_str(nitems)
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)
    return info
def test_002_simple(self):
    """Single burst through tagged_burst_to_pdu: verify every metadata field.

    Emits one new_burst/gone_burst tag pair around a 1024-sample burst and
    checks duration, start/end offsets, burst_id, start_time, sample_rate,
    bandwidth, noise_density, magnitude and the center/relative frequencies
    of the resulting PDU.  Assumes self.tb is a gr.top_block created in
    setUp() — confirm against the test fixture.
    """
    # This test processes a single burst and confirms that the resulting metadata is as expected
    # data
    min_data_size = 32 * 1024  # arbitrary constant in tagged_burst_to_pdu_impl.h
    src_data = (1, ) * min_data_size * 8
    new_burst_offset = 64
    new_burst_dict = pmt.make_dict()
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__burst_id(),
                                  pmt.from_uint64(1234))
    new_burst_dict = pmt.dict_add(
        new_burst_dict, fhss_utils.PMTCONSTSTR__relative_frequency(),
        pmt.from_float(1e6 / 30.72e6))  # in [-1.0,1.0], not Hz
    new_burst_dict = pmt.dict_add(
        new_burst_dict, fhss_utils.PMTCONSTSTR__center_frequency(),
        pmt.from_float(915e6))  # of the whole signal, not the burst
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__magnitude(),
                                  pmt.from_float(40))
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__sample_rate(),
                                  pmt.from_float(30.72e6))
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__noise_density(),
                                  pmt.from_float(-100))  # in dBFS/Hz
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__bandwidth(),
                                  pmt.from_float(0.1e6))
    nb_tag = gr.tag_utils.python_to_tag([
        new_burst_offset,
        fhss_utils.PMTCONSTSTR__new_burst(), new_burst_dict,
        pmt.intern("qa_test")
    ])
    duration = 1024
    gone_burst_offset = new_burst_offset + duration
    gone_burst_dict = pmt.make_dict()
    gone_burst_dict = pmt.dict_add(gone_burst_dict,
                                   fhss_utils.PMTCONSTSTR__burst_id(),
                                   pmt.from_uint64(1234))
    gb_tag = gr.tag_utils.python_to_tag([
        gone_burst_offset,
        fhss_utils.PMTCONSTSTR__gone_burst(), gone_burst_dict,
        pmt.intern("qa_test")
    ])

    # blocks
    src = blocks.vector_source_c(src_data, False, 1, [nb_tag, gb_tag])
    #src.set_min_output_buffer(min_data_size*2) # not necessary, block calls set_output_multiple
    debug = blocks.message_debug()
    dec = 16
    taps = [1]
    min_time = 10e-6  # 1024/30.72e6 is about 33 usec
    max_time = 1.0
    nthreads = 3
    samp_rate = 30.72e6
    rel_span = 1
    rel_samp_rate = 1
    rel_cf = 0
    dut = fhss_utils.tagged_burst_to_pdu(dec, taps, min_time, max_time,
                                         rel_cf, rel_span, rel_samp_rate,
                                         samp_rate, nthreads)

    self.tb.connect(src, dut)
    self.tb.msg_connect((dut, 'cpdus'), (debug, 'store'))

    #self.tb.run() # blocking, vector_source will end flowgraph
    # start/sleep/stop instead of run(): gives the async PDU path time to
    # deliver before the flowgraph is torn down
    self.tb.start()
    time.sleep(0.1)
    self.tb.stop()
    time.sleep(0.1)
    self.tb.wait()
    time.sleep(0.1)

    print("test simple:")
    #print(f"how many msg? {debug.num_messages()}")
    self.assertEqual(debug.num_messages(), 1)
    #print(f"received: {pmt.car(debug.get_message(0))}")
    #print(f"received: {pmt.cdr(debug.get_message(0))}")
    rcv_meta = pmt.car(debug.get_message(0))
    self.assertAlmostEqual(
        pmt.to_double(
            pmt.dict_ref(rcv_meta, pmt.intern("duration"), pmt.PMT_NIL)),
        duration / samp_rate, 6)
    self.assertEqual(
        pmt.to_uint64(
            pmt.dict_ref(rcv_meta, pmt.intern("start_offset"), pmt.PMT_NIL)),
        new_burst_offset)
    self.assertEqual(
        pmt.to_uint64(
            pmt.dict_ref(rcv_meta, pmt.intern("end_offset"), pmt.PMT_NIL)),
        gone_burst_offset)
    self.assertEqual(
        pmt.to_uint64(
            pmt.dict_ref(rcv_meta, pmt.intern("burst_id"), pmt.PMT_NIL)),
        1234)
    self.assertAlmostEqual(
        pmt.to_double(
            pmt.dict_ref(rcv_meta, pmt.intern("start_time"), pmt.PMT_NIL)),
        new_burst_offset / samp_rate, 6)
    self.assertAlmostEqual(
        pmt.to_double(
            pmt.dict_ref(rcv_meta, pmt.intern("sample_rate"), pmt.PMT_NIL)),
        samp_rate / dec, 6)
    self.assertAlmostEqual(
        pmt.to_double(
            pmt.dict_ref(rcv_meta, pmt.intern("bandwidth"), pmt.PMT_NIL)),
        0.1e6, 1)
    self.assertAlmostEqual(
        pmt.to_double(
            pmt.dict_ref(rcv_meta, pmt.intern("noise_density"), pmt.PMT_NIL)),
        -100, 1)
    self.assertAlmostEqual(
        pmt.to_double(
            pmt.dict_ref(rcv_meta, pmt.intern("magnitude"), pmt.PMT_NIL)),
        40, 1)
    self.assertAlmostEqual(
        pmt.to_double(
            pmt.dict_ref(rcv_meta, pmt.intern("center_frequency"),
                         pmt.PMT_NIL)), 916e6, 0)  # center of burst
    self.assertAlmostEqual(
        pmt.to_double(
            pmt.dict_ref(rcv_meta, pmt.intern("relative_frequency"),
                         pmt.PMT_NIL)), 1e6, 0)  # in Hz
def test_005_short_burst(self):
    """Two bursts at the same offset; the 128-sample one is below min_time.

    Burst 111 (128 samples) is shorter than min_time (5e-6 s) and should be
    dropped; burst 222 (1024 samples) should come through as the only PDU.
    Assumes self.tb is a gr.top_block created in setUp() — confirm against
    the test fixture.
    """
    # This test processes two bursts of which the first should be dropped for being too short
    # data
    min_data_size = 32 * 1024  # arbitrary constant in tagged_burst_to_pdu_impl.h
    src_data = (1, ) * min_data_size * 8
    new_burst_offset = 64
    new_burst_dict = pmt.make_dict()
    new_burst_dict = pmt.dict_add(
        new_burst_dict, fhss_utils.PMTCONSTSTR__center_frequency(),
        pmt.from_float(915e6))  # of the whole signal, not the burst
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__magnitude(),
                                  pmt.from_float(40))
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__sample_rate(),
                                  pmt.from_float(30.72e6))
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__noise_density(),
                                  pmt.from_float(-100))  # in dBFS/Hz
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__bandwidth(),
                                  pmt.from_float(0.1e6))
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__burst_id(),
                                  pmt.from_uint64(111))
    new_burst_dict = pmt.dict_add(
        new_burst_dict, fhss_utils.PMTCONSTSTR__relative_frequency(),
        pmt.from_float(1e6 / 30.72e6))  # in [-1.0,1.0], not Hz
    nb_tag_short = gr.tag_utils.python_to_tag([
        new_burst_offset,
        fhss_utils.PMTCONSTSTR__new_burst(), new_burst_dict,
        pmt.intern("qa_test")
    ])
    # NOTE(review): this re-adds burst_id/relative_frequency to the same
    # dict for burst 222 — relies on pmt.dict_add replacing existing keys;
    # confirm against the pmt API
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__burst_id(),
                                  pmt.from_uint64(222))
    new_burst_dict = pmt.dict_add(
        new_burst_dict, fhss_utils.PMTCONSTSTR__relative_frequency(),
        pmt.from_float(2e6 / 30.72e6))  # in [-1.0,1.0], not Hz
    nb_tag_normal = gr.tag_utils.python_to_tag([
        new_burst_offset,
        fhss_utils.PMTCONSTSTR__new_burst(), new_burst_dict,
        pmt.intern("qa_test")
    ])
    short_duration = 128
    normal_duration = 1024
    gone_burst_dict = pmt.make_dict()
    gone_burst_dict = pmt.dict_add(gone_burst_dict,
                                   fhss_utils.PMTCONSTSTR__burst_id(),
                                   pmt.from_uint64(111))
    gone_burst_offset = new_burst_offset + short_duration
    gb_tag_short = gr.tag_utils.python_to_tag([
        gone_burst_offset,
        fhss_utils.PMTCONSTSTR__gone_burst(), gone_burst_dict,
        pmt.intern("qa_test")
    ])
    gone_burst_dict = pmt.dict_add(gone_burst_dict,
                                   fhss_utils.PMTCONSTSTR__burst_id(),
                                   pmt.from_uint64(222))
    gone_burst_offset = new_burst_offset + normal_duration
    gb_tag_normal = gr.tag_utils.python_to_tag([
        gone_burst_offset,
        fhss_utils.PMTCONSTSTR__gone_burst(), gone_burst_dict,
        pmt.intern("qa_test")
    ])

    # blocks
    src = blocks.vector_source_c(
        src_data, False, 1,
        [nb_tag_short, nb_tag_normal, gb_tag_short, gb_tag_normal])
    #src.set_min_output_buffer(min_data_size*2) # not necessary, block calls set_output_multiple
    debug = blocks.message_debug()
    dec = 16
    taps = [1]
    min_time = 5e-6  # 153-ish samples
    max_time = 1e-3
    nthreads = 3
    samp_rate = 30.72e6
    rel_span = 1
    rel_samp_rate = 1
    rel_cf = 0
    dut = fhss_utils.tagged_burst_to_pdu(dec, taps, min_time, max_time,
                                         rel_cf, rel_span, rel_samp_rate,
                                         samp_rate, nthreads)

    self.tb.connect(src, dut)
    self.tb.msg_connect((dut, 'cpdus'), (debug, 'store'))

    #self.tb.run() # blocking, vector_source will end flowgraph
    # start/sleep/stop instead of run(): gives the async PDU path time to
    # deliver before the flowgraph is torn down
    self.tb.start()
    time.sleep(0.1)
    self.tb.stop()
    time.sleep(0.1)
    self.tb.wait()
    time.sleep(0.1)

    print("test short burst:")
    print(f"how many msg? {debug.num_messages()}")
    self.assertEqual(debug.num_messages(), 1)  # first message dropped
    #print(f"received: {pmt.car(debug.get_message(0))}")
    #print(f"received: {pmt.cdr(debug.get_message(0))}")
    rcv_meta = pmt.car(debug.get_message(0))
    # we expect to not receive burst 111, but to receive burst 222
    self.assertEqual(
        pmt.to_uint64(
            pmt.dict_ref(rcv_meta, pmt.intern("burst_id"), pmt.PMT_NIL)),
        222)
def test_004_long_burst(self):
    """A 1024-sample burst exceeding max_time (10e-6 s) must be truncated.

    Verifies the emitted PDU's duration equals max_time, its vector length
    equals (max_time * samp_rate) // dec, and the burst_id is preserved.
    Assumes self.tb is a gr.top_block created in setUp() — confirm against
    the test fixture.
    """
    # This test processes a single burst that exceeds the maximum burst size that will be truncated
    # data
    min_data_size = 32 * 1024  # arbitrary constant in tagged_burst_to_pdu_impl.h
    src_data = (1, ) * min_data_size * 8
    new_burst_offset = 64
    new_burst_dict = pmt.make_dict()
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__burst_id(),
                                  pmt.from_uint64(505))
    new_burst_dict = pmt.dict_add(
        new_burst_dict, fhss_utils.PMTCONSTSTR__relative_frequency(),
        pmt.from_float(1e6 / 30.72e6))  # in [-1.0,1.0], not Hz
    new_burst_dict = pmt.dict_add(
        new_burst_dict, fhss_utils.PMTCONSTSTR__center_frequency(),
        pmt.from_float(915e6))  # of the whole signal, not the burst
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__magnitude(),
                                  pmt.from_float(40))
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__sample_rate(),
                                  pmt.from_float(30.72e6))
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__noise_density(),
                                  pmt.from_float(-100))  # in dBFS/Hz
    new_burst_dict = pmt.dict_add(new_burst_dict,
                                  fhss_utils.PMTCONSTSTR__bandwidth(),
                                  pmt.from_float(0.1e6))
    nb_tag = gr.tag_utils.python_to_tag([
        new_burst_offset,
        fhss_utils.PMTCONSTSTR__new_burst(), new_burst_dict,
        pmt.intern("qa_test")
    ])
    duration = 1024
    gone_burst_offset = new_burst_offset + duration
    gone_burst_dict = pmt.make_dict()
    gone_burst_dict = pmt.dict_add(gone_burst_dict,
                                   fhss_utils.PMTCONSTSTR__burst_id(),
                                   pmt.from_uint64(505))
    gb_tag = gr.tag_utils.python_to_tag([
        gone_burst_offset,
        fhss_utils.PMTCONSTSTR__gone_burst(), gone_burst_dict,
        pmt.intern("qa_test")
    ])

    # blocks
    src = blocks.vector_source_c(src_data, False, 1, [nb_tag, gb_tag])
    #src.set_min_output_buffer(min_data_size*2) # not necessary, block calls set_output_multiple
    debug = blocks.message_debug()
    dec = 16
    taps = [1]
    min_time = 1e-6
    max_time = 10e-6
    nthreads = 3
    samp_rate = 30.72e6
    rel_span = 1
    rel_samp_rate = 1
    rel_cf = 0
    dut = fhss_utils.tagged_burst_to_pdu(dec, taps, min_time, max_time,
                                         rel_cf, rel_span, rel_samp_rate,
                                         samp_rate, nthreads)

    self.tb.connect(src, dut)
    self.tb.msg_connect((dut, 'cpdus'), (debug, 'store'))

    #self.tb.run() # blocking, vector_source will end flowgraph
    # start/sleep/stop instead of run(): gives the async PDU path time to
    # deliver before the flowgraph is torn down
    self.tb.start()
    time.sleep(0.1)
    self.tb.stop()
    time.sleep(0.1)
    self.tb.wait()
    time.sleep(0.1)

    print("test long burst:")
    #print(f"how many msg? {debug.num_messages()}")
    self.assertEqual(debug.num_messages(), 1)
    #print(f"received: {pmt.car(debug.get_message(0))}")
    #print(f"received: {pmt.cdr(debug.get_message(0))}")
    #print(f"received len: {pmt.length(pmt.cdr(debug.get_message(0)))}")
    rcv_meta = pmt.car(debug.get_message(0))
    # we expect a duration equal to `max_time` and a vector of length that corrseponds to `max_time` samples
    self.assertAlmostEqual(
        pmt.to_double(
            pmt.dict_ref(rcv_meta, pmt.intern("duration"), pmt.PMT_NIL)),
        max_time, 6)
    self.assertEqual(pmt.length(pmt.cdr(debug.get_message(0))),
                     (max_time * samp_rate) // dec)
    self.assertEqual(
        pmt.to_uint64(
            pmt.dict_ref(rcv_meta, pmt.intern("burst_id"), pmt.PMT_NIL)),
        505)
def parse_header(p, VERBOSE=False):
    """Parse a GNU Radio file-metadata header PMT dictionary into a plain dict.

    Parameters:
        p: PMT dictionary holding the header fields.
        VERBOSE: when True, print each extracted field to stdout.

    Returns a dict with keys: rx_rate, rx_time, size, type, cplx,
    hdr_len, extra_len, has_extra, nitems, nbytes.

    Exits the process (via sys.exit) if the header is not a PMT dict or
    any required key is missing.
    """
    dump = pmt.PMT_NIL
    info = dict()
    if(pmt.is_dict(p) is False):
        sys.stderr.write("Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)
    # GET FILE FORMAT VERSION NUMBER
    if(pmt.dict_has_key(p, pmt.string_to_symbol("version"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("version"), dump)
        version = pmt.to_long(r)
        if(VERBOSE):
            print("Version Number: {0}".format(version))
    else:
        sys.stderr.write("Could not find key 'version': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT SAMPLE RATE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_rate"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_rate"), dump)
        samp_rate = pmt.to_double(r)
        info["rx_rate"] = samp_rate
        if(VERBOSE):
            print("Sample Rate: {0:.2f} sps".format(samp_rate))
    else:
        sys.stderr.write("Could not find key 'sr': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT TIME STAMP (tuple of uint64 integer seconds, double fractional)
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_time"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_time"), dump)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
        info["rx_time"] = t
        if(VERBOSE):
            print("Seconds: {0:.6f}".format(t))
    else:
        sys.stderr.write("Could not find key 'time': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT ITEM SIZE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("size"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("size"), dump)
        dsize = pmt.to_long(r)
        info["size"] = dsize
        if(VERBOSE):
            print("Item size: {0}".format(dsize))
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT DATA TYPE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("type"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("type"), dump)
        dtype = pmt.to_long(r)
        stype = ftype_to_string[dtype]
        info["type"] = stype
        if(VERBOSE):
            print("Data Type: {0} ({1})".format(stype, dtype))
    else:
        sys.stderr.write("Could not find key 'type': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT COMPLEX
    if(pmt.dict_has_key(p, pmt.string_to_symbol("cplx"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("cplx"), dump)
        cplx = pmt.to_bool(r)
        info["cplx"] = cplx
        if(VERBOSE):
            print("Complex? {0}".format(cplx))
    else:
        sys.stderr.write("Could not find key 'cplx': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT WHERE CURRENT SEGMENT STARTS
    if(pmt.dict_has_key(p, pmt.string_to_symbol("strt"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("strt"), dump)
        seg_start = pmt.to_uint64(r)
        info["hdr_len"] = seg_start
        info["extra_len"] = seg_start - HEADER_LENGTH
        info["has_extra"] = info["extra_len"] > 0
        if(VERBOSE):
            print("Header Length: {0} bytes".format(info["hdr_len"]))
            print("Extra Length: {0}".format((info["extra_len"])))
            print("Extra Header? {0}".format(info["has_extra"]))
    else:
        sys.stderr.write("Could not find key 'strt': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT SIZE OF DATA
    if(pmt.dict_has_key(p, pmt.string_to_symbol("bytes"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("bytes"), dump)
        nbytes = pmt.to_uint64(r)
        # BUGFIX: '/' is true division in Python 3 and would make the item
        # count a float; the segment byte count is a whole number of items,
        # so floor division yields the intended integer.
        nitems = nbytes // dsize
        info["nitems"] = nitems
        info["nbytes"] = nbytes
        if(VERBOSE):
            print("Size of Data: {0} bytes".format(nbytes))
            print("              {0} items".format(nitems))
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)
    return info