def test_004_boost_time(self):
    """tags_to_pdu: burst_time metadata honors the configured start time.

    An SOB at offset 34 and a matching EOB are injected; the resulting
    PDU's burst_time must equal start_time + 34/sample_rate, and the
    wall_clock_time metadata must be within 1 s of the test's own clock.
    """
    self.tb = gr.top_block()
    start_time = 0.1
    sob_tag = gr.tag_utils.python_to_tag((34, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag((34 + (8 * 31), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(350), False, 1, [sob_tag, eob_tag])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'), 1024, 512000, ([]), False, 0, start_time)
    t2p.enable_time_debug(True)
    t2p.set_eob_parameters(8, 0)
    dbg = blocks.message_debug()
    td = pdu_utils.time_delta("TIME CHECKER")
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    self.tb.msg_connect((t2p, 'pdu_out'), (td, 'pdu_in'))
    expected_vec = pmt.init_s16vector((8 * 31), range(34, 34 + (8 * 31)))
    expected_time = start_time + (34 / 512000.0)
    ts = time.time()

    self.tb.run()

    self.assertEqual(dbg.num_messages(), 1)
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0))
                           + pmt.to_double(pmt.tuple_ref(time_tuple1, 1)), expected_time)
    # wall clock stamp should be close to (at most 1 s after) our snapshot
    wct = pmt.to_double(pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("wall_clock_time"), pmt.PMT_NIL))
    self.assertTrue((wct - ts) < 1.0)
    self.tb = None
def test_simple(self):
    """Power/energy metadata of an emitted PDU matches the computed values.

    A malformed PDU is emitted first to confirm it is ignored.
    """
    # make the data and expected results
    data = [x + x * 1j for x in range(10)]
    pdu_in = pmt.cons(pmt.make_dict(), pmt.init_c32vector(len(data), data))
    expected_energy = sum([abs(x)**2 for x in data])
    expected_power = 10 * np.log10(expected_energy / len(data))

    # run flowgraph
    self.tb.start()
    time.sleep(.001)
    self.emitter.emit(pmt.intern("BAD PDU"))  # malformed input: should be dropped
    time.sleep(.001)
    self.emitter.emit(pdu_in)
    time.sleep(.01)
    self.tb.stop()
    self.tb.wait()  # don't wait...may not return in time

    # extract results
    rcv_pdu = self.debug.get_message(0)
    rcv_meta = pmt.car(rcv_pdu)
    rcv_data = pmt.c32vector_elements(pmt.cdr(rcv_pdu))
    rcv_energy = pmt.to_double(
        pmt.dict_ref(rcv_meta, pmt.intern("energy"), pmt.PMT_NIL))
    rcv_power = pmt.to_double(
        pmt.dict_ref(rcv_meta, pmt.intern("power"), pmt.PMT_NIL))

    # assert expectations
    precision = 1e-3
    self.assertTrue(abs(rcv_energy - expected_energy) < precision)
    self.assertTrue(abs(rcv_power - expected_power) < precision)
def test_003_double_eob_rej_tt_update(self):
    """A duplicate EOB is rejected and an rx_time tag re-bases burst timing.

    Burst 1 (SOB@51..EOB@51+88) is stamped from start_time; the rx_time
    tag at offset 360 updates the clock, so burst 2 (SOB@400) is stamped
    relative to the tag's (4, 0.125) time tuple.  The early EOB@409 ends
    burst 2 (zero-padded to the 8-sample boundary) and the trailing
    EOB@416 must be ignored.
    """
    self.tb = gr.top_block()
    start_time = 0.0
    sob_tag = gr.tag_utils.python_to_tag((51, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag((51 + (8 * 11), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    time_tuple = pmt.make_tuple(pmt.from_uint64(4), pmt.from_double(0.125),
                                pmt.from_uint64(10000000), pmt.from_double(4000000.0))
    time_tag = gr.tag_utils.python_to_tag((360, pmt.intern("rx_time"), time_tuple, pmt.intern("src")))
    sob_tag2 = gr.tag_utils.python_to_tag((400, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag2e = gr.tag_utils.python_to_tag((409, pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag2 = gr.tag_utils.python_to_tag((416, pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(500), False, 1,
                                [sob_tag, eob_tag, time_tag, sob_tag2, eob_tag2e, eob_tag2])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'), 1024, 1000000, ([]), False, 0, start_time)
    t2p.set_eob_parameters(8, 0)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    expected_vec1 = pmt.init_s16vector((8 * 11), range(51, 51 + (8 * 11)))
    expected_vec2 = pmt.init_s16vector(16, list(range(400, 409)) + [0] * 7)
    expected_time1 = start_time + (51 / 1000000.0)
    expected_time2 = 4.125 + ((400 - 360) / 1000000.0)

    self.tb.run()

    self.assertEqual(dbg.num_messages(), 2)
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec1))
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(1)), expected_vec2))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("burst_time"), pmt.PMT_NIL)
    time_tuple2 = pmt.dict_ref(pmt.car(dbg.get_message(1)), pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0))
                           + pmt.to_double(pmt.tuple_ref(time_tuple1, 1)), expected_time1)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple2, 0))
                           + pmt.to_double(pmt.tuple_ref(time_tuple2, 1)), expected_time2)

    self.tb = None
def test_002_normal(self):
    """A system_time stamp 10 s in the past yields a ~10000 ms delta."""
    tnow = time.time()

    in_data = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1]
    meta = pmt.dict_add(pmt.make_dict(), pmt.intern(
        'system_time'), pmt.from_double(tnow - 10.0))
    in_pdu = pmt.cons(meta, pmt.init_c32vector(len(in_data), in_data))
    e_data = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1]
    e_meta = pmt.dict_add(pmt.make_dict(), pmt.intern(
        'system_time'), pmt.from_double(tnow))
    e_meta = pmt.dict_add(e_meta, pmt.intern(
        'sys time delta (ms)'), pmt.from_double(10000.0))
    e_pdu = pmt.cons(e_meta, pmt.init_c32vector(len(e_data), e_data))

    # set up fg
    self.tb.start()
    self.time_delta.to_basic_block()._post(pmt.intern("pdu"), in_pdu)
    self.waitFor(lambda: self.debug.num_messages() == 1, timeout=1.0, poll_interval=0.01)
    self.tb.stop()
    self.tb.wait()

    # check data: stamp is near "now", delta is near 10 s (in ms)
    self.assertEqual(1, self.debug.num_messages())
    a_meta = pmt.car(self.debug.get_message(0))
    time_tag = pmt.dict_ref(a_meta, pmt.intern("system_time"), pmt.PMT_NIL)
    delta_tag = pmt.dict_ref(a_meta, pmt.intern(
        "sys time delta (ms)"), pmt.PMT_NIL)
    self.assertAlmostEqual(tnow, pmt.to_double(time_tag), delta=60)
    self.assertAlmostEqual(10000, pmt.to_double(delta_tag), delta=10)
def test_003_every(self):
    """tag_uhd_offset_c with interval 1: a time/offset tag on every sample.

    For each rx_time_offset tag: the rate field matches, the tag offset
    equals the embedded sample count, counts are interval-aligned, and
    successive tag times advance by interval/rate.
    """
    self.tb = gr.top_block()
    self.rate = 99999999999
    self.interval = 1
    self.duration = 4321
    self.src = blocks.vector_source_c(list(range(self.duration)), False, 1, [])
    self.utag = timing_utils.add_usrp_tags_c(1090e6, self.rate, 0, .98765)
    self.tags = timing_utils.tag_uhd_offset_c(self.rate, self.interval)
    self.tag_dbg = blocks.tag_debug(gr.sizeof_gr_complex * 1, "", "");
    self.tag_dbg.set_display(False)
    self.tb.connect((self.src, 0), (self.utag, 0))
    self.tb.connect((self.utag, 0), (self.tags, 0))
    self.tb.connect((self.tags, 0), (self.tag_dbg, 0))
    # +3: extra one-shot tags presumably from add_usrp_tags (freq/rate/time) — TODO confirm
    e_n_tags = int(ceil(1.0 * self.duration / self.interval)) + 3

    self.tb.run()

    tags = self.tag_dbg.current_tags()
    tprev = None
    for t in tags:
        if pmt.eq(t.key, pmt.intern("rx_time_offset")):
            self.assertAlmostEqual(self.rate, pmt.to_double(pmt.tuple_ref(t.value, 3)), -4)
            self.assertEqual(t.offset, pmt.to_uint64(pmt.tuple_ref(t.value, 2)))
            self.assertTrue((pmt.to_uint64(pmt.tuple_ref(t.value, 2)) / (1.0 * self.interval)).is_integer())
            if tprev is not None:
                tcur = pmt.to_uint64(pmt.tuple_ref(t.value, 0)) + pmt.to_double(pmt.tuple_ref(t.value, 1))
                self.assertAlmostEqual(tcur - tprev, 1.0 * self.interval / self.rate)
                tprev = tcur
            else:
                tprev = pmt.to_uint64(pmt.tuple_ref(t.value, 0)) + pmt.to_double(pmt.tuple_ref(t.value, 1))

    self.assertEqual(self.tag_dbg.num_tags(), e_n_tags)
    self.tb = None
def test_002_update(self):
    """add_usrp_tags_c: update_tags() re-tags the stream with new freq/rate."""
    start_time = 0.1
    self.duration = 125000
    self.src = blocks.vector_source_c(list(range(self.duration)), False, 1, [])
    self.throttle = blocks.throttle(gr.sizeof_gr_complex * 1, 250000)
    self.utag = timing_utils.add_usrp_tags_c(1090e6, 250000, 0, start_time)
    self.tag_dbg = blocks.tag_debug(gr.sizeof_gr_complex * 1, '', "")
    self.tb.connect((self.src, 0), (self.throttle, 0))
    self.tb.connect((self.throttle, 0), (self.utag, 0))
    self.tb.connect((self.utag, 0), (self.tag_dbg, 0))
    self.tb.start()
    time.sleep(.01)

    # initial tags must reflect the constructor arguments
    #print("Dumping tags")
    for t in self.tag_dbg.current_tags():
        #print( 'Tag:' , t.key, ' ', t.value )
        if pmt.eq(t.key, pmt.intern("rx_freq")):
            self.assertAlmostEqual(1090e6, pmt.to_double(t.value))
        if pmt.eq(t.key, pmt.intern("rx_rate")):
            self.assertAlmostEqual(250000, pmt.to_double(t.value))

    self.utag.update_tags(self.makeDict(freq=1091e6, rate=260000, epoch_int=0,
                                        epoch_frac=start_time + .3))
    time.sleep(.01)

    # after the update the stream must carry the new values
    #print("Dumping tags")
    for t in self.tag_dbg.current_tags():
        #print( 'Tag:' , t.key, ' ', t.value )
        if pmt.eq(t.key, pmt.intern("rx_freq")):
            self.assertAlmostEqual(1091e6, pmt.to_double(t.value))
        if pmt.eq(t.key, pmt.intern("rx_rate")):
            self.assertAlmostEqual(260000, pmt.to_double(t.value))

    time.sleep(.1)
    self.tb.stop()
def test_002_timing(self):
    """Two PDUs posted ~1 s apart must carry 'systime' stamps ~1 s apart.

    A malformed PDU is posted first to confirm it is ignored, then two
    valid PDUs separated by a one-second sleep; the difference between
    their 'systime' metadata must match that separation.
    """
    self.tb.start()
    # malformed message: should be dropped without producing output
    self.add_sys_time.to_basic_block()._post(pmt.intern("pdu"), pmt.intern("BAD PDU"))
    self.add_sys_time.to_basic_block()._post(
        pmt.intern("pdu"), pmt.cons(pmt.make_dict(), pmt.init_u8vector(1, [0])))
    time.sleep(1.0)  # wait for one second to provide a time difference between messages
    self.add_sys_time.to_basic_block()._post(
        pmt.intern("pdu"), pmt.cons(pmt.make_dict(), pmt.init_u8vector(1, [0])))
    self.waitFor(lambda: self.debug.num_messages() == 2, timeout=1.0, poll_interval=0.01)
    self.tb.stop()
    self.tb.wait()

    t0 = pmt.to_double(
        pmt.dict_ref(pmt.car(self.debug.get_message(0)), pmt.intern("systime"), pmt.from_double(0.0)))
    t1 = pmt.to_double(
        pmt.dict_ref(pmt.car(self.debug.get_message(1)), pmt.intern("systime"), pmt.from_double(0.0)))
    # bug fix: bound the magnitude of the deviation from 1 s.  The original
    # unsigned comparison ((t1 - t0) - 1) < 0.05 also passed for deltas far
    # below one second (e.g. zero), hiding timing failures.
    self.assertTrue(abs((t1 - t0) - 1) < 0.05)  # should be sufficient tolerance
def msg_handler(self, m):
    """Buffer (blockstart, blockend, blockleft, blockright) from a metadata message.

    Messages that are not a pair, lack a dict, or are missing valid
    block bounds or relative frequency/bandwidth entries are ignored.
    """
    if not pmt.is_pair(m):
        return
    meta = pmt.car(m)
    if not pmt.is_dict(meta):
        return

    def _long_field(name):
        return pmt.to_long(pmt.dict_ref(meta, pmt.intern(name), pmt.from_long(-1024)))

    def _double_field(name):
        return pmt.to_double(pmt.dict_ref(meta, pmt.intern(name), pmt.from_double(-1.0)))

    blockstart = _long_field('blockstart')
    blockend = _long_field('blockend')
    # -1024 is the sentinel default for a missing bound
    if blockstart == -1024 or blockend == -1024:
        return

    rel_cfreq = _double_field('rel_cfreq')
    rel_bw = _double_field('rel_bw')
    if rel_cfreq < 0.0 or rel_bw < 0.0:
        return

    half_bw = rel_bw / 2.0
    blockleft = int(self.normwidth * (rel_cfreq - half_bw))
    blockright = int(numpy.ceil(self.normwidth * (rel_cfreq + half_bw)))
    #print('new msg: {} {} {} {} {} {}'.format(blockstart, blockend, rel_cfreq, rel_bw, blockleft, blockright))

    self.msg_puffer += [(blockstart, blockend, blockleft, blockright)]
def work(self, input_items, output_items):
    """Pass samples through while detecting gaps via rx_time tags.

    The first rx_time tag latches the sample rate and a time/offset
    reference.  Each subsequent rx_time tag compares the expected
    offset advance (elapsed hardware time * rate) with the actual tag
    offset advance; any shortfall is reported as a gap of that many
    samples.  Input is copied to output unchanged.
    """
    in0 = input_items[0]
    out = output_items[0]
    if self.got_fist_tag is not True:
        rx_rate_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_rate"))
        rx_time_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_time"))
        if len(rx_time_tags) > 0:
            self.got_fist_tag = True
            self.rx_rate = pmt.to_double(rx_rate_tags[0].value)
            self.offset_prev = rx_time_tags[0].offset
            self.rx_time_prev_secs = pmt.to_uint64(
                pmt.tuple_ref(rx_time_tags[0].value, 0))
            self.rx_time_prev_frac = pmt.to_double(
                pmt.tuple_ref(rx_time_tags[0].value, 1))
            if len(rx_time_tags) > 1:
                # bug fix: print() function form (the Python-2 print
                # statement is a syntax error under Python 3); typo fixed
                print("Unsupported situation - more than one tag in a single work(..) call")
    else:
        rx_time_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_time"))
        if len(rx_time_tags) > 0:
            tt = rx_time_tags[0]
            # compute number of zeros (missing samples) implied by this tag
            self.rx_time_secs = pmt.to_uint64(pmt.tuple_ref(tt.value, 0))
            self.rx_time_frac = pmt.to_double(pmt.tuple_ref(tt.value, 1))
            self.offset = tt.offset
            diff_offset = self.offset - self.offset_prev
            diff_offset_real = (
                (self.rx_time_secs - self.rx_time_prev_secs)
                + (self.rx_time_frac - self.rx_time_prev_frac)) * self.rx_rate
            zeros = diff_offset_real - diff_offset
            print("Found a gap in the data at offset:", self.offset, " with length:", zeros, " [samps]")
            # save previous value
            self.offset_prev = self.offset
            self.rx_time_prev_secs = self.rx_time_secs
            self.rx_time_prev_frac = self.rx_time_frac
            if len(rx_time_tags) > 1:
                print("Unsupported situation - more than one tag in a single work(..) call")
    out[:] = in0
    return len(output_items[0])
def test_002_normal(self):
    """A wall_clock_time stamp 10 s in the past yields a ~10000 ms delta."""
    tnow = time.time()

    in_data = [
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1
    ]
    meta = pmt.dict_add(pmt.make_dict(), pmt.intern('wall_clock_time'),
                        pmt.from_double(tnow - 10))
    in_pdu = pmt.cons(meta, pmt.init_c32vector(len(in_data), in_data))
    e_data = [
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1
    ]
    e_meta = pmt.dict_add(pmt.make_dict(), pmt.intern('wall_clock_time'),
                          pmt.from_double(tnow))
    e_pdu = pmt.cons(e_meta, pmt.init_c32vector(len(e_data), e_data))

    # set up fg
    self.tb.start()
    time.sleep(.001)
    self.emitter.emit(in_pdu)
    time.sleep(.01)
    self.tb.stop()
    self.tb.wait()

    # check data: both metadata fields must be present and in range
    self.assertEqual(1, self.debug.num_messages())
    a_meta = pmt.car(self.debug.get_message(0))
    time_tag = pmt.dict_ref(a_meta, pmt.intern("wall_clock_time"), pmt.PMT_NIL)
    if not pmt.eqv(time_tag, pmt.PMT_NIL):
        self.assertAlmostEqual(tnow, pmt.to_double(time_tag), delta=60)
    else:
        self.assertTrue(False)  # missing wall_clock_time is a failure
    delta_tag = pmt.dict_ref(a_meta, pmt.intern("time_delta_ms"), pmt.PMT_NIL)
    if not pmt.eqv(delta_tag, pmt.PMT_NIL):
        self.assertAlmostEqual(10000, pmt.to_double(delta_tag), delta=10)
    else:
        self.assertTrue(False)  # missing time_delta_ms is a failure
def msg_handler(self, p):
    """Append the values of key/value-pair message *p* as one CSV row.

    *p* is a pmt list of (key, value) elements.  With self.key == "all",
    a header row of every key is written once (guarded by self.counter),
    then every value is logged; otherwise only values whose key is in
    self.key are logged.  rx_time tuples are flattened to secs + frac;
    all other values are assumed to be f32 vectors whose first element
    is written — TODO confirm against the message producer.

    NOTE(review): self.fdout is (re)opened only when self.filename is
    non-empty but is written and closed unconditionally below; an empty
    filename would fail here unless fdout is assigned elsewhere — verify.
    """
    if self.filename != "":
        self.fdout = open(self.filename, "a")
    length = pmt.length(p)
    if self.key == "all":
        # if all keys are printed, they need however be printed once above
        if self.counter == 0:
            for i in range(0, length):
                element = pmt.nth(i, p)
                current_key = str(pmt.nth(0, element))
                self.fdout.write(current_key + ",")
            self.fdout.write("\n")
            self.counter = 1
        # print all
        for i in range(0, length):
            element = pmt.nth(i, p)
            current_key = str(pmt.nth(0, element))
            current_value = pmt.nth(1, element)
            if current_key == "rx_time":
                number = pmt.to_uint64(pmt.tuple_ref(current_value, 0)) + \
                    pmt.to_double(pmt.tuple_ref(current_value, 1))
                self.fdout.write(str(number) + ",")
            else:
                self.fdout.write(str(pmt.f32vector_elements(current_value)[0]) + ",")
    else:
        # print all values that correspond to keys
        for key in self.key:
            for i in range(0, length):
                element = pmt.nth(i, p)
                current_key = str(pmt.nth(0, element))
                current_value = pmt.nth(1, element)
                if current_key == key:
                    if key == "rx_time":
                        number = pmt.to_uint64(pmt.tuple_ref(current_value, 0)) + \
                            pmt.to_double(pmt.tuple_ref(current_value, 1))
                        self.fdout.write(str(number) + ",")
                    else:
                        self.fdout.write(str(pmt.f32vector_elements(current_value)[0]) + ",")
    self.fdout.write("\n")
    self.fdout.close()
def test02(self):
    """Round-trip a value through a PMT double and read it back both ways."""
    const = 123765
    wrapped = pmt.from_double(const)
    as_double = pmt.to_double(wrapped)
    as_float = pmt.to_float(wrapped)
    # both accessors must recover the original value exactly
    for recovered in (as_double, as_float):
        self.assertEqual(recovered, const)
def handle_command(self, msg):
    """Handle a tune-command dictionary message.

    The incoming message should be a dict containing at least the
    lo_offset entry (self.dict_key); if present, a 'freq' command with
    the negated offset is published for the downstream tuner.  An
    optional 'time' entry (secs . frac pair) schedules when the output
    stream should be tagged; otherwise tagging is immediate.
    """
    # incoming message will be a dictionary that should contain the items
    # freq and lo_offset at a minimum - if this is met, issue a command
    # that can be handled by the freq_xlating_fir_filter_ccf block
    try:
        # we don't care about the frequency since we are CORDIC tuning
        lo_offset = pmt.dict_ref(msg, self.dict_key, pmt.PMT_NIL)
        if not pmt.eqv(lo_offset, pmt.PMT_NIL):
            offset = pmt.to_python(lo_offset)
            self.message_port_pub(pmt.intern("freq"),
                                  pmt.cons(pmt.intern("freq"),
                                           pmt.from_double(-1.0 * offset)))

            # if the dictionary has a time value, use it
            time_tag = pmt.dict_ref(msg, pmt.intern("time"), pmt.PMT_NIL)
            if not pmt.eqv(time_tag, pmt.PMT_NIL):
                secs = pmt.to_uint64(pmt.car(time_tag)) - self.origin_time['secs']
                frac = pmt.to_double(pmt.cdr(time_tag)) - self.origin_time['frac']
                # bug fix: int() replaces Python-2-only long(); Python ints
                # are arbitrary precision, so behavior is unchanged
                tune_sample = int(secs * self.sample_rate) + int(frac * self.sample_rate)
            else:
                tune_sample = TAG_IMMEDIATELY

            # we will also set the block to tag the output when it is time
            self.tag_offset = tune_sample
            self.tag_value = pmt.from_double(-1.0 * offset)

    except Exception as e:
        # bug fix: print() function form works on both Python 2 and 3
        # (the bare print statement is a syntax error under Python 3)
        print("exception: " + repr(e))
def test_002_tags(self):
    """system_time_diff_c passes a wall_clock_time tag with a sane value."""
    start_time = 0.1
    self.duration = 125000
    tnow = time.time()
    src_tag = gr.tag_utils.python_to_tag([0, pmt.intern("wall_clock_time"),
                                          pmt.from_double(tnow - 10000),
                                          pmt.intern("test_002_tags")])
    self.src = blocks.vector_source_c(list(range(self.duration)), False, 1, [src_tag])
    self.throttle = blocks.throttle(gr.sizeof_gr_complex * 1, 250000)
    self.dut = timing_utils.system_time_diff_c(True, True, False)
    self.tag_dbg = blocks.tag_debug(gr.sizeof_gr_complex * 1, '', "")
    self.tb.connect((self.src, 0), (self.throttle, 0))
    self.tb.connect((self.throttle, 0), (self.dut, 0))
    self.tb.connect((self.dut, 0), (self.tag_dbg, 0))

    self.tb.start()
    time.sleep(.01)
    tags = self.tag_dbg.current_tags()

    # bug fix: initialize before the search loop — previously a missing
    # wall_clock_time tag raised NameError on the 'if time_tag:' check
    # instead of failing the assertion below
    time_tag = None
    print("Dumping tags")
    for t in tags:
        print('Tag:', t.key, ' ', t.value)
        if pmt.eq(t.key, pmt.intern("wall_clock_time")):
            time_tag = t
    if time_tag:
        self.assertAlmostEqual(tnow, pmt.to_double(time_tag.value), delta=60)
    else:
        self.assertTrue(False)

    time.sleep(.1)
    self.tb.stop()
def write_data(self, msg):
    """Append the SNR, encoding, and inter-message delay from *msg* to log files.

    Each configured file (snr_file, enc_file, delay_file) gets one line
    per message; empty filenames disable that log.  When self.debug is
    set the same values are printed.
    """
    snr = pmt.to_double(pmt.dict_ref(msg, pmt.intern("snr"),
                                     pmt.from_double(0)))
    encoding = pmt.to_long(pmt.dict_ref(msg, pmt.intern("encoding"),
                                        pmt.from_long(0)))

    # milliseconds since the previous message
    time_now = time() * 1000
    delay = str(time_now - self.last_time)
    self.last_time = time_now

    # idiom fix: 'with' guarantees each file is closed even if the
    # write raises (the original leaked the handle on failure)
    if self.snr_file != "":
        with open(self.snr_file, 'a') as f_snr:
            f_snr.write(str(snr) + '\n')
    if self.enc_file != "":
        with open(self.enc_file, 'a') as f_enc:
            f_enc.write(str(encoding) + '\n')
    if self.delay_file != "":
        with open(self.delay_file, 'a') as f_delay:
            f_delay.write(delay + '\n')

    if self.debug:
        print("SNR:" + str(snr))
        print("Encoding:" + str(encoding))
        print("Delay in millis: " + delay)
def test_002_1tag(self):
    '''
    Tests a stream with a single tag
    '''
    src_tag = gr.tag_utils.python_to_tag([
        0, pmt.intern("sam"), pmt.from_double(10000), pmt.intern("test_002_1tag")
    ])
    src_data = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
    src = blocks.vector_source_i(src_data, False, 1, [src_tag])
    dut = sandia_utils.sandia_tag_debug(gr.sizeof_int, "tag QA")
    self.tb.connect(src, dut)
    self.tb.run()

    # exactly the one injected tag must be captured, unchanged
    self.assertEqual(1, dut.num_tags())

    tag0 = dut.get_tag(0)
    self.assertTrue(pmt.eq(tag0.key, pmt.intern("sam")))
    self.assertAlmostEqual(10000, pmt.to_double(tag0.value))
def test_005_two_sobs_misaligned(self):
    """Two SOB tags with a misaligned SOB-to-EOB span.

    The expected PDU starts at the second SOB (offset 35) and is padded
    with one zero to keep the 8-sample alignment; burst_time is based
    on offset 35.
    """
    # Two SOB tags and the SOB-to-EOB length is not aligned
    self.tb = gr.top_block()
    start_time = 0.1
    sob_tag = gr.tag_utils.python_to_tag((34, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    sob_tag2 = gr.tag_utils.python_to_tag((35, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag((34 + (8 * 31), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(1350), False, 1, [sob_tag, sob_tag2, eob_tag])
    #vs = blocks.vector_source_s(range(350), False, 1, [sob_tag, eob_tag])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'), 1024, 512000, ([]), False, 0, start_time)
    t2p.set_eob_parameters(8, 0)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    expected_vec = pmt.init_s16vector((8 * 31), list(range(35, 34 + (8 * 31))) + [0])
    expected_time = start_time + (35 / 512000.0)

    self.tb.run()

    self.assertEqual(dbg.num_messages(), 1)
    #print "got ", dbg.get_message(0)
    #print "expected", expected_vec
    #print "len is {}".format(len(pmt.to_python(pmt.cdr(dbg.get_message(0)))))
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0))
                           + pmt.to_double(pmt.tuple_ref(time_tuple1, 1)), expected_time)
    self.tb = None
def parse_time_pmt(val, samples_per_second):
    """Get (sec, frac, idx) from an rx_time pmt value."""
    whole_secs = np.uint64(pmt.to_uint64(pmt.tuple_ref(val, 0)))
    frac_secs = pmt.to_double(pmt.tuple_ref(val, 1))
    # sample index of the timestamp, floored to uint64
    sample_idx = np.uint64(whole_secs * samples_per_second
                           + frac_secs * samples_per_second)
    return int(whole_secs), frac_secs, int(sample_idx)
def test_002_secondSOB(self):
    """A second SOB arriving before any EOB restarts the burst at its offset."""
    self.tb = gr.top_block()
    start_time = 4.999999999
    sob_tag = gr.tag_utils.python_to_tag(
        (34, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    sob_tag2 = gr.tag_utils.python_to_tag(
        (51, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    eob_tag = gr.tag_utils.python_to_tag(
        (51 + (8 * 26), pmt.intern("EOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(350), False, 1,
                                [sob_tag, sob_tag2, eob_tag])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'),
                                  1024, 460800, ([]), False, 0, start_time)
    t2p.set_eob_parameters(8, 0)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    # expected PDU spans from the second SOB (offset 51) to the EOB
    expected_vec = pmt.init_s16vector((8 * 26), range(51, 51 + (8 * 26)))
    expected_time = start_time + (51 / 460800.0)

    self.tb.run()

    self.assertEqual(dbg.num_messages(), 1)
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)),
                               pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(
        pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0)) +
        pmt.to_double(pmt.tuple_ref(time_tuple1, 1)), expected_time)
    self.tb = None
def work(self, input_items, output_items):
    """Pass-through work() that reacts to PRS/sync timing tags.

    'start_prs' tags predict the next PRS window (0.096 s period on the
    monotonic clock) and, given enough lead time, re-arm the timer and
    un-gate the block; 'rx_time' tags record the latest (offset, time)
    pair; 'sync' tags gate the block and trigger tune_2().
    """
    in0 = input_items[0]
    out = output_items[0]
    n = len(in0)
    tags = self.get_tags_in_window(0, 0, n)
    for tag in tags:
        key = pmt.symbol_to_string(tag.key)
        if key == 'start_prs':
            value = pmt.to_double(tag.value)
            prs_rx_time = self.monotonic_raw_from_offset(tag.offset)
            #print "prs @", tag.offset + value, prs_rx_time
            #print "that was", monotonic() - prs_rx_time, "seconds ago"
            # next PRS boundary: the PRS repeats every 0.096 s
            next_prs = prs_rx_time + 0.096 * (int((monotonic() - prs_rx_time) / 0.096) + 1)
            #print "next prs @", next_prs
            next_prs_in = next_prs - monotonic()
            #print "next prs in", next_prs - monotonic()
            # only re-arm when there is at least 10 ms of lead time
            if next_prs_in > 0.010:
                self.update_timer(next_prs)
                self.gated = False
        elif key == 'rx_time':
            value = pmt.to_uint64(tag.value)
            #print "sample", tag.offset, "sampled at", value
            self.last_rx_time = (tag.offset, value)
        elif key == 'sync':
            self.gated = True
            self.tune_2()
    out[:] = in0
    return len(output_items[0])
def test_007_max_pdu_size_SOBs(self):
    """Two SOBs exactly max_pdu_size apart: the first PDU is capped at that size."""
    # two SOB tags exactly max_pdu_size samples apart
    self.tb = gr.top_block()
    start_time = 0.1
    max_size = 100
    sob_tag = gr.tag_utils.python_to_tag((10, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    sob_tag3 = gr.tag_utils.python_to_tag((10 + max_size, pmt.intern("SOB"), pmt.PMT_T, pmt.intern("src")))
    vs = blocks.vector_source_s(range(1350), False, 1, [sob_tag, sob_tag3])
    t2p = pdu_utils.tags_to_pdu_s(pmt.intern('SOB'), pmt.intern('EOB'), 1024, 512000, ([]), False, 0, start_time)
    t2p.set_eob_parameters(10, 0)
    t2p.set_max_pdu_size(max_size)
    dbg = blocks.message_debug()
    self.tb.connect(vs, t2p)
    self.tb.msg_connect((t2p, 'pdu_out'), (dbg, 'store'))
    expected_vec = pmt.init_s16vector((max_size), range(10, 10 + max_size))
    expected_time = start_time + (10 / 512000.0)

    self.tb.run()

    # assertions for the first PDU only, second PDU will exist
    self.assertEqual(dbg.num_messages(), 2)
    #print "got ", dbg.get_message(0)
    #print "expected", expected_vec
    self.assertTrue(pmt.equal(pmt.cdr(dbg.get_message(0)), expected_vec))
    time_tuple1 = pmt.dict_ref(pmt.car(dbg.get_message(0)), pmt.intern("burst_time"), pmt.PMT_NIL)
    self.assertAlmostEqual(pmt.to_uint64(pmt.tuple_ref(time_tuple1, 0))
                           + pmt.to_double(pmt.tuple_ref(time_tuple1, 1)), expected_time)
    self.tb = None
def XX_test_002_tag_immediately(self):
    """timed_tag_retuner: a tune command without a time entry tags immediately.

    Disabled via the XX_ prefix so the test runner skips it.
    """
    # tune message
    tune = pmt.dict_add(pmt.make_dict(), pmt.intern('freq'), pmt.from_double(100))

    # blocks
    src = blocks.null_source(gr.sizeof_gr_complex * 1)
    throttle = blocks.throttle(gr.sizeof_gr_complex * 1, 32000, True)
    retuner = timing_utils.timed_tag_retuner(1e6, pmt.intern("freq"), 1, 0.1)
    debug = sandia_utils.sandia_tag_debug(gr.sizeof_gr_complex * 1, '', "", True)
    emitter = pdu_utils.message_emitter()
    debug.set_display(True)
    self.tb.connect(src, throttle)
    self.tb.connect(throttle, retuner)
    self.tb.connect(retuner, debug)
    self.tb.msg_connect((emitter, 'msg'), (retuner, 'command'))

    self.tb.start()
    time.sleep(.1)
    emitter.emit(tune)
    time.sleep(.1)
    self.tb.stop()

    # assert: exactly one set_freq tag carrying the negated command frequency
    self.assertEqual(debug.num_tags(), 1)
    tag = debug.get_tag(0)
    self.assertTrue(pmt.equal(tag.key, pmt.intern('set_freq')))
    freq = pmt.to_double(tag.value)
    self.assertAlmostEqual(-100, freq)
def work(self, input_items, output_items):
    """Accumulate bit errors against a reference generator and publish BER.

    On each rx_time tag, the running error/bit counters are flushed as
    a {timestamp, ber} dict message on self.ber_port_id and reset; the
    timestamp is taken from the tag's (secs, frac) tuple.
    """
    inb = input_items[0]
    linb = len(inb)
    gen = self.base.gen_n(linb)  # reference sequence of matching length
    tags = self.get_tags_in_window(0, 0, linb, pmt.intern("rx_time"))
    if tags:
        tag = tags[-1]
        rx_time = tag.value
        seconds = pmt.to_uint64(pmt.tuple_ref(rx_time, 0))
        fractional_seconds = pmt.to_double(pmt.tuple_ref(rx_time, 1))
        timestamp = seconds + fractional_seconds
        if self.nbits > 0:
            ber = self.nerrs / float(self.nbits)
            #print "NBits: %d \tNErrs: %d \tBER: %.4E, \ttimestamp %f"%(int(self.nbits), int(self.nerrs), ber, timestamp)
            d = pmt.make_dict()
            d = pmt.dict_add(d, pmt.intern('timestamp'), pmt.from_double(timestamp))
            d = pmt.dict_add(d, pmt.intern('ber'), pmt.from_double(ber))
            self.message_port_pub(self.ber_port_id, d)
            self.nerrs = 0
            self.nbits = 0
    # accumulate errors/bits for this window
    self.nerrs += numpy.sum(numpy.bitwise_xor(inb, gen))
    self.nbits += len(inb)
    # if self.nbits > 0:
    #     print "NBits: %d \tNErrs: %d \tBER: %.4E"%(int(self.nbits), int(self.nerrs), self.nerrs/self.nbits)
    return len(inb)
def test_001(self):
    """correlate_and_sync_cc: the time_est tag matches a QA-side estimate.

    A simple preamble is resampled with a parabolic pulse shape, run
    through the correlator, and the tag's fractional timing estimate is
    checked against the offset derived from the centroid of the output.
    """
    # We're using a really simple preamble so that the correlation
    # is straight forward.
    preamble = [0, 0, 0, 1, 0, 0, 0]
    # Our pulse shape has this width (in units of symbols).
    pulse_width = 1.5
    # The number of filters to use for resampling.
    n_filters = 12
    sps = 3
    data = [0] * 10 + preamble + [0] * 40
    src = blocks.vector_source_c(data)

    # We want to generate taps with a sampling rate of sps=n_filters for resampling
    # purposes.
    pulse_shape = make_parabolic_pulse_shape(sps=n_filters, N=0.5, scale=35)

    # Create our resampling filter to generate the data for the correlator.
    shape = filter.pfb_arb_resampler_ccf(sps, pulse_shape, n_filters)
    # Generate the correlator block itself.
    correlator = digital.correlate_and_sync_cc(preamble, pulse_shape, sps, n_filters)

    # Connect it all up and go.
    snk = blocks.vector_sink_c()
    null = blocks.null_sink(gr.sizeof_gr_complex)
    tb = gr.top_block()
    tb.connect(src, shape, correlator, snk)
    tb.connect((correlator, 1), null)
    tb.run()

    # Look at the tags. Retrieve the timing offset.
    data = snk.data()
    offset = None
    timing_error = None
    for tag in snk.tags():
        key = pmt.symbol_to_string(tag.key)
        if key == "time_est":
            offset = tag.offset
            timing_error = pmt.to_double(tag.value)
    if offset is None:
        raise ValueError("No tags found.")

    # Detect where the middle of the preamble is.
    # Assume we have only one peak and that it is symmetric.
    sum_id = 0
    sum_d = 0
    for i, d in enumerate(data):
        sum_id += i * abs(d)
        sum_d += abs(d)
    data_i = sum_id / sum_d

    if offset is not None:
        # expected fractional offset, wrapped into (-sps/2, sps/2]
        diff = data_i - offset
        remainder = -(diff % sps)
        if remainder < -sps / 2.0:
            remainder += sps
        tol = 0.2
        difference = timing_error - remainder
        difference = difference % sps
        if abs(difference) >= tol:
            print("Tag gives timing estimate of {0}. QA calculates it as {1}. Tolerance is {2}".format(timing_error, remainder, tol))
        self.assertTrue(abs(difference) < tol)
def test_monte_carlo(self):
    """pdu_clock_recovery monte-carlo sweep over increasing noise power.

    NOTE(review): bit-error and rate failures are only printed, never
    asserted — the trailing assertTrue(True) makes this informational
    only; confirm that is intentional.
    """
    emitter = pdu_utils.message_emitter()
    clock_rec = pdu_utils.pdu_clock_recovery(True)
    msg_debug = blocks.message_debug()

    # make connections
    self.tb.msg_connect((emitter, 'msg'), (clock_rec, 'pdu_in'))
    self.tb.msg_connect((clock_rec, 'pdu_out'), (msg_debug, 'store'))

    # run
    self.tb.start()
    time.sleep(.05)

    # generate and emit
    for i in range(100):
        n_symbols = 100
        sps = 8
        noise_power = 0.02 * i  # noise grows with each iteration
        original_bits = np.random.randint(0, 2, n_symbols)
        original_samples = original_bits * 2 - 1
        sample_rate = 1e6
        symbol_rate = sample_rate / sps
        data = np.repeat(original_samples, sps) + (
            np.random.rand(n_symbols * sps) * np.sqrt(noise_power))
        meta = pmt.make_dict()
        meta = pmt.dict_add(meta, self.pmt_sample_rate, pmt.from_double(1e6))
        vector = pmt.init_f32vector(len(data), data)
        emitter.emit(pmt.cons(meta, vector))

        time.sleep(.05)

        result = msg_debug.get_message(i)
        result_meta = pmt.car(result)
        result_vector = pmt.to_python(pmt.cdr(result))
        n_errors = sum(original_bits[:len(result_vector)]
                       ^ result_vector[:len(original_bits)])
        result_rate = pmt.to_double(
            pmt.dict_ref(result_meta, self.pmt_symbol_rate, pmt.PMT_NIL))

        #print("result is ", result_rate)
        #print("we expected ", symbol_rate)
        #print("result vector is", result_vector)
        #print("we expected ", original_bits)
        #print("num errors", n_errors)

        # assert some stuff
        if n_errors != 0:
            print("got bad data", i)
        if (result_rate - symbol_rate) > 100:
            print("got bad rate", i)

    # shut down
    self.tb.stop()
    self.tb.wait()

    self.assertTrue(True)
def test_003_tags(self):
    '''
    Tests a stream that has multiple tags inside it
    '''
    src_tag1 = gr.tag_utils.python_to_tag([
        0, pmt.intern("sam"), pmt.from_double(10000), pmt.intern("test_003_tags")
    ])
    src_tag2 = gr.tag_utils.python_to_tag([
        1, pmt.intern("peter"), pmt.from_double(1000), pmt.intern("test_003_tags")
    ])
    src_tag3 = gr.tag_utils.python_to_tag([
        2, pmt.intern("jacob"), pmt.from_double(100), pmt.intern("test_003_tags")
    ])
    src_data = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
    src = blocks.vector_source_i(src_data, False, 1,
                                 [src_tag1, src_tag2, src_tag3])
    dut = sandia_utils.sandia_tag_debug(gr.sizeof_int, "tag QA")
    self.tb.connect(src, dut)
    self.tb.run()

    # all three tags must be captured unchanged, in offset order
    self.assertEqual(3, dut.num_tags())

    tag0 = dut.get_tag(0)
    tag1 = dut.get_tag(1)
    tag2 = dut.get_tag(2)

    self.assertTrue(pmt.eq(tag0.key, pmt.intern("sam")))
    self.assertAlmostEqual(10000, pmt.to_double(tag0.value))

    self.assertTrue(pmt.eq(tag1.key, pmt.intern("peter")))
    self.assertAlmostEqual(1000, pmt.to_double(tag1.value))

    self.assertTrue(pmt.eq(tag2.key, pmt.intern("jacob")))
    self.assertAlmostEqual(100, pmt.to_double(tag2.value))
def test_001_timestamp(self):
    '''Check correct timestamp tags'''
    self.tb.run()

    output_data = self.dst.data()
    output_tags = self.dst.tags()

    # keep only the tags whose key is "timestamp"
    timestamp_tags = [tag for tag in output_tags
                      if pmt.to_python(tag.key) == "timestamp"]

    # Timestamp tags at index 12, 15
    self.assertEqual(len(timestamp_tags), 2)
    for tag, expected in zip(timestamp_tags, (12.0, 15.0)):
        self.assertAlmostEqual(pmt.to_double(tag.value), expected)
def test_timing(self):
    """Messages emitted ~1 s apart carry wall_clock_time stamps ~1 s apart.

    A malformed PDU is emitted first to confirm it is ignored, then two
    valid PDUs separated by a one-second sleep.
    """
    self.tb.start()
    self.emitter.emit(pmt.intern("BAD PDU"))  # malformed: should be dropped
    time.sleep(.01)
    self.emitter.emit(pmt.cons(pmt.make_dict(), pmt.init_u8vector(1, [0])))
    time.sleep(1.0)
    self.emitter.emit(pmt.cons(pmt.make_dict(), pmt.init_u8vector(1, [0])))
    time.sleep(.05)
    self.tb.stop()
    self.tb.wait()

    t0 = pmt.to_double(pmt.dict_ref(pmt.car(self.debug.get_message(0)),
                                    pmt.intern("wall_clock_time"),
                                    pmt.from_double(0.0)))
    t1 = pmt.to_double(pmt.dict_ref(pmt.car(self.debug.get_message(1)),
                                    pmt.intern("wall_clock_time"),
                                    pmt.from_double(0.0)))
    # bug fix: bound the magnitude of the deviation from 1 s.  The original
    # unsigned comparison ((t1 - t0) - 1) < 0.05 also passed for deltas far
    # below one second (e.g. zero), hiding timing failures.
    self.assertTrue(abs((t1 - t0) - 1) < 0.05)
def from_pmt(cls, pmt_elem):
    """Construct an instance from a pmt dict of detection fields."""
    # assert pmt.dict_has_key(pmt_elem,pmt.intern('tidx'))
    def _field(name):
        return pmt.dict_ref(pmt_elem, pmt.intern(name), pmt.PMT_NIL)

    tidx = pmt.to_long(_field('tidx'))
    xcorr = pmt.to_double(_field('xcorr'))
    xautocorr = pmt.to_double(_field('xautocorr'))
    cfo = pmt.to_double(_field('cfo'))
    preamble_mag2 = pmt.to_double(_field('preamble_mag2'))
    awgn_mag2_nodc = pmt.to_double(_field('awgn_mag2_nodc'))
    dc_offset = pmt.to_complex(_field('dc_offset'))
    return cls(tidx, xcorr, xautocorr, cfo,
               preamble_mag2, awgn_mag2_nodc, dc_offset)
def _assert_tags(self, expected_values, expected_offsets):
    """Check the tags received by the tag debug block"""
    received = self.tag_sink.current_tags()
    expected = list(zip(expected_values, expected_offsets))
    self.assertEqual(len(received), len(expected))
    # compare each received tag against its expected (value, offset) pair
    for tag, (value, offset) in zip(received, expected):
        self.assertAlmostEqual(pmt.to_double(tag.value), value, places=5)
        self.assertEqual(tag.offset, offset)
def test_001_t(self):
    """CW radar chain: simulate a target moving at 15 m/s, run
    source -> simulator -> FFT -> OS-CFAR -> estimator, and check the
    rx_time and velocity entries of the resulting message."""
    # Flowgraph parameters.
    test_len = 1024
    packet_len = test_len
    samp_rate = 2000
    center_freq = 1e9
    velocity = 15

    # Blocks.
    src = radar.signal_generator_cw_c(packet_len, samp_rate, (0, 0), 1)
    head = blocks.head(8, test_len)
    sim = radar.static_target_simulator_cc(
        (10, 10), (velocity, velocity), (1e9, 1e9), (0, 0), (0, ),
        samp_rate, center_freq, 1, True, False)
    mult = blocks.multiply_cc()
    fft = radar.ts_fft_cc(packet_len)
    cfar = radar.os_cfar_c(samp_rate, 5, 0, 0.78, 10, True)
    est = radar.estimator_cw(center_freq)
    res = radar.print_results()
    debug = blocks.message_debug()

    # Wiring: the simulated return is mixed with the original CW signal.
    self.tb.connect(src, head, (mult, 1))
    self.tb.connect(head, sim, (mult, 0))
    self.tb.connect(mult, fft, cfar)
    self.tb.msg_connect(cfar, 'Msg out', est, 'Msg in')
    self.tb.msg_connect(est, 'Msg out', res, 'Msg in')
    self.tb.msg_connect(est, 'Msg out', debug, 'store')

    self.tb.start()
    sleep(0.5)
    self.tb.stop()
    self.tb.wait()

    # The stored message is a list of (symbol, value) pairs.
    msg = debug.get_message(0)
    rx_time_part = pmt.nth(0, msg)
    velocity_part = pmt.nth(1, msg)

    # rx_time: symbol plus a (seconds, fractional-seconds) tuple, both zero.
    self.assertEqual("rx_time", pmt.symbol_to_string(pmt.nth(0, rx_time_part)))
    self.assertEqual(0, pmt.to_uint64(pmt.tuple_ref(pmt.nth(1, rx_time_part), 0)))
    self.assertEqual(0.0, pmt.to_double(pmt.tuple_ref(pmt.nth(1, rx_time_part), 1)))

    # velocity: symbol plus an f32 vector whose first entry is ~15 m/s.
    self.assertEqual("velocity", pmt.symbol_to_string(pmt.nth(0, velocity_part)))
    self.assertAlmostEqual(
        1, velocity / pmt.f32vector_ref(pmt.nth(1, velocity_part), 0), 2)
def test_rms(self):
    """RMS-based center-frequency estimation on a PDU.

    A slow complex tone is sent through cf_estimate(RMS); the output
    samples and the updated "center_frequency" metadata entry are
    checked against precomputed expectations.
    NOTE(review): expected CF is ~910.6001e6 (input CF + ~100 Hz) —
    presumably the tone's offset as measured by the RMS estimator;
    confirm against the cf_estimate block documentation.
    """
    # Flowgraph: emitter -> cf_estimate (RMS mode) -> message_debug store.
    self.emitter = pdu_utils.message_emitter()
    self.cf = fhss_utils.cf_estimate(fhss_utils.RMS, [])
    self.debug = blocks.message_debug()
    self.tb.msg_connect((self.emitter, 'msg'), (self.cf, 'in'))
    self.tb.msg_connect((self.cf, 'out'), (self.debug, 'store'))

    # original data: 20 samples of a slow complex exponential.
    in_data = np.exp(1j * np.array(np.linspace(0, 1 * np.pi * .02, 20)))
    i_vec = pmt.init_c32vector(len(in_data), in_data)

    # Expected output samples after the block's frequency correction.
    out_data = [(1 + 0j), (0.9999966 + 0.0026077442j),
                (0.9999864 + 0.0052154697j), (0.9999694 + 0.007823161j),
                (0.99994564 + 0.010430798j), (0.99991506 + 0.013038365j),
                (0.99987763 + 0.015645843j), (0.99983346 + 0.018253215j),
                (0.99978244 + 0.020860463j), (0.9997247 + 0.023467569j),
                (0.99966 + 0.026074518j), (0.99958867 + 0.028681284j),
                (0.9995105 + 0.03128786j), (0.9994256 + 0.033894222j),
                (0.99933374 + 0.03650035j), (0.99923515 + 0.03910623j),
                (0.9991298 + 0.04171185j), (0.99901766 + 0.044317182j),
                (0.9988987 + 0.046922214j), (0.9987729 + 0.04952693j)]
    e_vec = pmt.init_c32vector(len(out_data), out_data)

    # Metadata carried by the PDU: sample rate and nominal center frequency.
    meta = pmt.make_dict()
    meta = pmt.dict_add(meta, pmt.intern("sample_rate"), pmt.from_float(1e6))
    meta = pmt.dict_add(meta, pmt.intern("center_frequency"),
                        pmt.from_float(910.6e6))
    in_pdu = pmt.cons(meta, i_vec)
    e_pdu = pmt.cons(meta, e_vec)

    self.tb.start()
    time.sleep(.001)
    self.emitter.emit(in_pdu)
    time.sleep(.01)
    self.tb.stop()
    self.tb.wait()

    # parse output
    #print("got ", list(pmt.to_python(pmt.cdr(self.debug.get_message(0)))))
    #print("got ", self.debug.get_message(0))
    rcv = self.debug.get_message(0)
    rcv_meta = pmt.car(rcv)
    rcv_data = pmt.cdr(rcv)
    rcv_cf = pmt.to_double(
        pmt.dict_ref(rcv_meta, pmt.intern("center_frequency"), pmt.PMT_NIL))

    # asserts: samples match to 2 places; CF within 100 Hz of expectation.
    self.assertComplexTuplesAlmostEqual(
        tuple(pmt.c32vector_elements(rcv_data)), tuple(out_data), 2)
    self.assertTrue(abs(rcv_cf - 910.6001e6) < 100)
def update_timestamp(hdr, seg_size):
    """Advance a metadata header's 'rx_time' entry by seg_size samples.

    Reads the (seconds, fractional-seconds) 'rx_time' tuple and the
    'rx_rate' sample rate from *hdr*, computes the timestamp of the next
    segment (t + seg_size / rate), and returns a copy of *hdr* with
    'rx_time' replaced by the new value.

    Parameters:
        hdr:      PMT dict holding at least 'rx_time'; 'rx_rate' is
                  required to actually advance the time.
        seg_size: segment length in samples (items).

    Returns: updated PMT dict. If 'rx_rate' is missing, the dict is
    returned with 'rx_time' deleted but not re-added (original behavior
    preserved).

    Exits the process via sys.exit(1) if 'rx_time' is missing.
    """
    if pmt.dict_has_key(hdr, pmt.string_to_symbol("rx_time")):
        r = pmt.dict_ref(hdr, pmt.string_to_symbol("rx_time"), pmt.PMT_NIL)
        secs = float(pmt.to_uint64(pmt.tuple_ref(r, 0)))
        fracs = pmt.to_double(pmt.tuple_ref(r, 1))
        t = secs + fracs
    else:
        sys.stderr.write("Could not find key 'time': "
                         "invalid or corrupt data file.\n")
        sys.exit(1)

    new_hdr = pmt.dict_delete(hdr, pmt.intern("rx_time"))
    if pmt.dict_has_key(hdr, pmt.intern("rx_rate")):
        r = pmt.dict_ref(hdr, pmt.intern("rx_rate"), pmt.PMT_NIL)
        rate = pmt.to_double(r)
        new_t = t + float(seg_size) / rate
        # BUGFIX: 'long' does not exist in Python 3; int() is equivalent
        # here (Python ints are arbitrary precision).
        new_secs = int(new_t)
        new_fracs = new_t - new_secs
        time_val = pmt.make_tuple(pmt.from_uint64(new_secs),
                                  pmt.from_double(new_fracs))
        new_hdr = pmt.dict_add(new_hdr, pmt.intern("rx_time"), time_val)
    return new_hdr
def test_001_t (self):
    """CW radar chain: simulate a target moving at 15 m/s and check the
    estimator's output message (rx_time and velocity parts)."""
    # set up fg
    test_len = 1024
    packet_len = test_len
    samp_rate = 2000
    center_freq = 1e9
    velocity = 15

    # Blocks: CW source -> (head) -> target simulator, mixed with the
    # original signal, then FFT -> OS-CFAR -> estimator.
    src = radar.signal_generator_cw_c(packet_len,samp_rate,(0,0),1)
    head = blocks.head(8,test_len)
    sim = radar.static_target_simulator_cc((10,10),(velocity,velocity),(1e9,1e9),(0,0),(0,),samp_rate,center_freq,1,True,False)
    mult = blocks.multiply_cc()
    fft = radar.ts_fft_cc(packet_len)
    cfar = radar.os_cfar_c(samp_rate, 5, 0, 0.78, 10, True)
    est = radar.estimator_cw(center_freq)
    res = radar.print_results()
    debug = blocks.message_debug()

    self.tb.connect(src,head,(mult,1))
    self.tb.connect(head,sim,(mult,0))
    self.tb.connect(mult,fft,cfar)
    self.tb.msg_connect(cfar,'Msg out',est,'Msg in')
    self.tb.msg_connect(est,'Msg out',res,'Msg in')
    self.tb.msg_connect(est,'Msg out',debug,'store')
    #self.tb.msg_connect(est,'Msg out',debug,'print')

    self.tb.start()
    sleep(0.5)
    self.tb.stop()
    self.tb.wait()

    # check data: message is a list of (symbol, value) pairs.
    msg = debug.get_message(0)
    self.assertEqual( "rx_time", pmt.symbol_to_string(pmt.nth(0,(pmt.nth(0,msg)))) ) # check rx_time message part (symbol)
    self.assertEqual( 0, pmt.to_uint64(pmt.tuple_ref(pmt.nth(1,(pmt.nth(0,msg))),0)) ) # check rx_time value
    self.assertEqual( 0.0, pmt.to_double(pmt.tuple_ref(pmt.nth(1,(pmt.nth(0,msg))),1)) )
    self.assertEqual( "velocity", pmt.symbol_to_string(pmt.nth(0,(pmt.nth(1,msg)))) ) # check velocity message part (symbol)
    self.assertAlmostEqual( 1, velocity/pmt.f32vector_ref(pmt.nth(1,(pmt.nth(1,msg))),0), 2 ) # check velocity value
def process_measurement(self, msg):
    """Handle a ("freq_offset", offset, state) measurement tuple.

    Converts the frequency offset to ppm and, depending on the receiver
    state, either publishes it immediately ("fcch_search"), folds it
    into a running single-pole estimate ("synchronized"), or resets and
    publishes 0.0 ("sync_loss").
    """
    if not pmt.is_tuple(msg):
        return
    key = pmt.symbol_to_string(pmt.tuple_ref(msg, 0))
    if key != "freq_offset":
        return

    freq_offset = pmt.to_double(pmt.tuple_ref(msg, 1))
    ppm = -freq_offset / self.fc * 1.0e6
    state = pmt.symbol_to_string(pmt.tuple_ref(msg, 2))
    self.last_state = state

    # Safeguard against flawed measurements.
    if abs(ppm) > 100:
        ppm = 0
        self.reset()

    if state == "fcch_search":
        # Publish the raw measurement and arm a watchdog reset.
        self.message_port_pub(pmt.intern("ppm"), pmt.from_double(ppm))
        self.timer.cancel()
        self.timer = Timer(0.5, self.timed_reset)
        self.timer.start()
    elif state == "synchronized":
        self.timer.cancel()
        # Exponential smoothing of the ppm estimate (first sample seeds it).
        if self.first_measurement:
            self.ppm_estimate = ppm
            self.first_measurement = False
        else:
            self.ppm_estimate = ((1 - self.alfa) * self.ppm_estimate
                                 + self.alfa * ppm)
        # Every 6th measurement, publish if the estimate moved enough.
        if self.counter == 5:
            self.counter = 0
            if abs(self.last_ppm_estimate - self.ppm_estimate) > 0.1:
                self.message_port_pub(pmt.intern("ppm"), pmt.from_double(ppm))
                self.last_ppm_estimate = self.ppm_estimate
        else:
            self.counter = self.counter + 1
    elif state == "sync_loss":
        self.reset()
        self.message_port_pub(pmt.intern("ppm"), pmt.from_double(0.0))
def test_001(self):
    """Round-trip test of file_meta_sink / file_meta_source.

    Writes a complex sine with an attached 'samp_rate' extra-header
    entry, parses the header back from disk, then replays the file
    through file_meta_source and checks tags and sample data.
    """
    N = 1000
    outfile = "test_out.dat"
    detached = False
    samp_rate = 200000

    # Build the serialized "extras" dictionary carried in the header.
    key = pmt.intern("samp_rate")
    val = pmt.from_double(samp_rate)
    extras = pmt.make_dict()
    extras = pmt.dict_add(extras, key, val)
    extras_str = pmt.serialize_str(extras)

    # Source data -> metadata file sink.
    data = sig_source_c(samp_rate, 1000, 1, N)
    src = blocks.vector_source_c(data)
    fsnk = blocks.file_meta_sink(gr.sizeof_gr_complex, outfile,
                                 samp_rate, 1,
                                 blocks.GR_FILE_FLOAT, True,
                                 1000000, extras_str, detached)
    fsnk.set_unbuffered(True)

    self.tb.connect(src, fsnk)
    self.tb.run()
    fsnk.close()

    # Read the header back and parse it; failures are test failures.
    handle = open(outfile, "rb")
    header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
    if(len(header_str) == 0):
        self.assertFalse()
    try:
        header = pmt.deserialize_str(header_str)
    except RuntimeError:
        self.assertFalse()
    info = parse_file_metadata.parse_header(header, False)

    # The extra header must be present and hold our samp_rate entry.
    extra_str = handle.read(info["extra_len"])
    self.assertEqual(len(extra_str) > 0, True)
    handle.close()
    try:
        extra = pmt.deserialize_str(extra_str)
    except RuntimeError:
        self.assertFalse()
    extra_info = parse_file_metadata.parse_extra_dict(extra, info, False)
    self.assertEqual(info['rx_rate'], samp_rate)
    self.assertEqual(pmt.to_double(extra_info['samp_rate']), samp_rate)

    # Test file metadata source
    src.rewind()
    fsrc = blocks.file_meta_source(outfile, False)
    vsnk = blocks.vector_sink_c()
    tsnk = blocks.tag_debug(gr.sizeof_gr_complex, "QA")
    ssnk = blocks.vector_sink_c()
    self.tb.disconnect(src, fsnk)
    self.tb.connect(fsrc, vsnk)
    self.tb.connect(fsrc, tsnk)
    self.tb.connect(src, ssnk)
    self.tb.run()
    fsrc.close()

    # Test to make sure tags with 'samp_rate' and 'rx_rate' keys
    # were generated and received correctly.
    tags = tsnk.current_tags()
    for t in tags:
        if(pmt.eq(t.key, pmt.intern("samp_rate"))):
            self.assertEqual(pmt.to_double(t.value), samp_rate)
        elif(pmt.eq(t.key, pmt.intern("rx_rate"))):
            self.assertEqual(pmt.to_double(t.value), samp_rate)

    # Test that the data portion was extracted and received correctly.
    self.assertComplexTuplesAlmostEqual(vsnk.data(), ssnk.data(), 5)
    os.remove(outfile)
def parse_header(p, VERBOSE=False):
    """Parse a GNU Radio file-metadata header (PMT dict) into a dict.

    Parameters:
        p:       deserialized header PMT dictionary.
        VERBOSE: when True, pretty-print each field as it is extracted.

    Returns a dict with keys: rx_rate, rx_time, size, type,
    cplx (only if present in the header), hdr_len, extra_len,
    has_extra, nitems, nbytes.

    Exits the process via sys.exit(1) when a mandatory key is missing.
    NOTE: Python 2 'print' statements were converted to print() calls,
    and '/' to '//' where integer division was relied upon, to keep this
    function working under Python 3 (matching the rest of the file).
    """
    dump = pmt.PMT_NIL
    info = dict()

    if pmt.is_dict(p) is False:
        sys.stderr.write("Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)

    # GET FILE FORMAT VERSION NUMBER
    if pmt.dict_has_key(p, pmt.string_to_symbol("version")):
        r = pmt.dict_ref(p, pmt.string_to_symbol("version"), dump)
        version = pmt.to_long(r)
        if VERBOSE:
            print("Version Number: {0}".format(version))
    else:
        sys.stderr.write("Could not find key 'version': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SAMPLE RATE
    if pmt.dict_has_key(p, pmt.string_to_symbol("rx_rate")):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_rate"), dump)
        samp_rate = pmt.to_double(r)
        info["rx_rate"] = samp_rate
        if VERBOSE:
            print("Sample Rate: " + eng_notation.num_to_str(samp_rate) + "SPS")
    else:
        sys.stderr.write("Could not find key 'sr': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT TIME STAMP (seconds, fractional-seconds) tuple
    if pmt.dict_has_key(p, pmt.string_to_symbol("rx_time")):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_time"), dump)
        secs = float(pmt.to_uint64(pmt.tuple_ref(r, 0)))
        fracs = pmt.to_double(pmt.tuple_ref(r, 1))
        t = secs + fracs
        info["rx_time"] = t
        if VERBOSE:
            time = datetime.fromtimestamp(t).strftime('%m/%d/%Y %H:%M:%S')
            print("Timestamp (Unix Epoch): " + time)
            print("Integer Seconds: " + repr(secs))
            print("Fractional Seconds: " + repr(fracs))
    else:
        sys.stderr.write("Could not find key 'time': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT ITEM SIZE (bytes per item)
    if pmt.dict_has_key(p, pmt.string_to_symbol("size")):
        r = pmt.dict_ref(p, pmt.string_to_symbol("size"), dump)
        dsize = pmt.to_long(r)
        info["size"] = dsize
        if VERBOSE:
            print("Item Size: " + eng_notation.num_to_str(dsize) + " Bytes")
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT DATA TYPE
    if pmt.dict_has_key(p, pmt.string_to_symbol("type")):
        r = pmt.dict_ref(p, pmt.string_to_symbol("type"), dump)
        dtype = pmt.to_long(r)
        stype = ftype_to_string[dtype]
        info["type"] = stype
        if VERBOSE:
            print("Data Type: {0} ({1})".format(stype, dtype))
    else:
        sys.stderr.write("Could not find key 'type': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT COMPLEX flag (optional key: no error when absent)
    if pmt.dict_has_key(p, pmt.string_to_symbol("cplx")):
        r = pmt.dict_ref(p, pmt.string_to_symbol("cplx"), dump)
        cplx = pmt.to_bool(r)
        info["cplx"] = cplx
        if VERBOSE:
            print("Complex? {0}".format(cplx))

        # Module-level globals kept for backward compatibility with
        # callers that read them after parse_header() returns.
        global vecchk
        global tsize
        if not cplx:
            # Per-item byte size by dtype code; 64 is the fallback used
            # by the original implementation for unknown codes.
            tsize = {0: 1, 1: 4, 2: 4, 3: 4, 5: 4, 6: 8}.get(dtype, 64)
            # Integer division: a dsize larger than one element's size
            # means the items are vectors.
            vecchk = dsize // tsize
            if vecchk > 1:
                print("The data is a vector containing {0} elements.".format(vecchk))
            else:
                print("The data is not a vector.")

    # EXTRACT WHERE CURRENT SEGMENT STARTS
    if pmt.dict_has_key(p, pmt.string_to_symbol("strt")):
        r = pmt.dict_ref(p, pmt.string_to_symbol("strt"), dump)
        seg_start = pmt.to_uint64(r)
        info["hdr_len"] = seg_start
        info["extra_len"] = seg_start - HEADER_LENGTH
        info["has_extra"] = info["extra_len"] > 0
        if VERBOSE:
            print("Header Length: {0} bytes".format(info["hdr_len"]))
            print("Extra Length: {0}".format((info["extra_len"])))
            print("Extra Header? {0}".format(info["has_extra"]))
    else:
        sys.stderr.write("Could not find key 'strt': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SIZE OF DATA
    if pmt.dict_has_key(p, pmt.string_to_symbol("bytes")):
        r = pmt.dict_ref(p, pmt.string_to_symbol("bytes"), dump)
        nbytes = pmt.to_uint64(r)
        nitems = nbytes // dsize  # whole items in this segment
        info["nitems"] = nitems
        info["nbytes"] = nbytes
        if VERBOSE:
            print("Segment Size (bytes): " + eng_notation.num_to_str(nbytes))
            print("Segment Size (items): " + eng_notation.num_to_str(nitems))
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    return info
def parse_header(p, VERBOSE=False):
    """Parse a GNU Radio file-metadata header (PMT dict) into a dict.

    Parameters:
        p:       deserialized header PMT dictionary.
        VERBOSE: when True, print each field as it is extracted.

    Returns a dict with keys: rx_rate, rx_time, size, type, cplx,
    hdr_len, extra_len, has_extra, nitems, nbytes.

    Exits the process via sys.exit(1) when a mandatory key is missing.
    """
    dump = pmt.PMT_NIL
    info = dict()

    if(pmt.is_dict(p) is False):
        sys.stderr.write("Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)

    # GET FILE FORMAT VERSION NUMBER
    if(pmt.dict_has_key(p, pmt.string_to_symbol("version"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("version"), dump)
        version = pmt.to_long(r)
        if(VERBOSE):
            print("Version Number: {0}".format(version))
    else:
        sys.stderr.write("Could not find key 'version': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SAMPLE RATE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_rate"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_rate"), dump)
        samp_rate = pmt.to_double(r)
        info["rx_rate"] = samp_rate
        if(VERBOSE):
            print("Sample Rate: {0:.2f} sps".format(samp_rate))
    else:
        sys.stderr.write("Could not find key 'sr': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT TIME STAMP: (integer seconds, fractional seconds) tuple
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_time"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_time"), dump)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
        info["rx_time"] = t
        if(VERBOSE):
            print("Seconds: {0:.6f}".format(t))
    else:
        sys.stderr.write("Could not find key 'time': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT ITEM SIZE (bytes per item)
    if(pmt.dict_has_key(p, pmt.string_to_symbol("size"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("size"), dump)
        dsize = pmt.to_long(r)
        info["size"] = dsize
        if(VERBOSE):
            print("Item size: {0}".format(dsize))
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT DATA TYPE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("type"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("type"), dump)
        dtype = pmt.to_long(r)
        stype = ftype_to_string[dtype]
        info["type"] = stype
        if(VERBOSE):
            print("Data Type: {0} ({1})".format(stype, dtype))
    else:
        sys.stderr.write("Could not find key 'type': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT COMPLEX flag
    if(pmt.dict_has_key(p, pmt.string_to_symbol("cplx"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("cplx"), dump)
        cplx = pmt.to_bool(r)
        info["cplx"] = cplx
        if(VERBOSE):
            print("Complex? {0}".format(cplx))
    else:
        sys.stderr.write("Could not find key 'cplx': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT WHERE CURRENT SEGMENT STARTS
    if(pmt.dict_has_key(p, pmt.string_to_symbol("strt"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("strt"), dump)
        seg_start = pmt.to_uint64(r)
        info["hdr_len"] = seg_start
        info["extra_len"] = seg_start - HEADER_LENGTH
        info["has_extra"] = info["extra_len"] > 0
        if(VERBOSE):
            print("Header Length: {0} bytes".format(info["hdr_len"]))
            print("Extra Length: {0}".format((info["extra_len"])))
            print("Extra Header? {0}".format(info["has_extra"]))
    else:
        sys.stderr.write("Could not find key 'strt': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SIZE OF DATA
    if(pmt.dict_has_key(p, pmt.string_to_symbol("bytes"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("bytes"), dump)
        nbytes = pmt.to_uint64(r)
        # NOTE(review): true division — nitems is a float here; callers
        # appear to use it for display only. Confirm before relying on it
        # as an integer count.
        nitems = nbytes / dsize
        info["nitems"] = nitems
        info["nbytes"] = nbytes
        if(VERBOSE):
            print("Size of Data: {0} bytes".format(nbytes))
            print("              {0} items".format(nitems))
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    return info