def run_flow_graph(sync_sym1, sync_sym2, data_sym):
    """Run one randomized OFDM channel-estimation pass and return the result.

    Builds a flowgraph: vector source -> random complex channel taps ->
    (+ Gaussian noise) -> ofdm_chanest_vcvc -> vector sink, with a random
    even carrier offset applied to all transmitted symbols.

    Returns:
        (carr_offset, rx_sym_est): the applied offset and the list of
        equalized data-carrier estimates, shifted back by the detected
        offset.

    NOTE(review): uses ``self`` plus several free names (``fft_len``,
    ``max_offset``, ``carrier_mask``, ``wgn_amplitude``, ``rand_range``,
    ...) that are not parameters — presumably this is a closure nested
    inside a test method; confirm against the full file.
    """
    top_block = gr.top_block()
    # Offset is doubled so it is always even — TODO confirm this matches
    # what ofdm_chanest_vcvc can detect.
    carr_offset = random.randint(-max_offset/2, max_offset/2) * 2
    tx_data = shift_tuple(sync_sym1, carr_offset) + \
              shift_tuple(sync_sym2, carr_offset) + \
              shift_tuple(data_sym, carr_offset)
    # Random complex taps: amplitude in [min_chan_ampl, max_chan_ampl),
    # phase uniform in [0, 2*pi).
    channel = [rand_range(min_chan_ampl, max_chan_ampl) * numpy.exp(1j * rand_range(0, 2 * numpy.pi)) for x in range(fft_len)]
    src = blocks.vector_source_c(tx_data, False, fft_len)
    chan = blocks.multiply_const_vcc(channel)
    noise = analog.noise_source_c(analog.GR_GAUSSIAN, wgn_amplitude)
    add = blocks.add_cc(fft_len)
    chanest = digital.ofdm_chanest_vcvc(sync_sym1, sync_sym2, 1)
    sink = blocks.vector_sink_c(fft_len)
    top_block.connect(src, chan, (add, 0), chanest, sink)
    # The noise source is a scalar stream; vectorize before adding.
    top_block.connect(noise, blocks.stream_to_vector(gr.sizeof_gr_complex, fft_len), (add, 1))
    top_block.run()
    channel_est = None
    carr_offset_hat = 0
    rx_sym_est = [0,] * fft_len
    tags = sink.tags()
    for tag in tags:
        if pmt.symbol_to_string(tag.key) == 'ofdm_sync_carr_offset':
            carr_offset_hat = pmt.to_long(tag.value)
            self.assertEqual(carr_offset, carr_offset_hat)
        if pmt.symbol_to_string(tag.key) == 'ofdm_sync_chan_taps':
            # Shift the estimate so it lines up with the unshifted channel.
            channel_est = shift_tuple(pmt.c32vector_elements(tag.value), carr_offset)
    shifted_carrier_mask = shift_tuple(carrier_mask, carr_offset)
    for i in range(fft_len):
        # Only check carriers that are active and actually estimated.
        if shifted_carrier_mask[i] and channel_est[i]:
            self.assertAlmostEqual(channel[i], channel_est[i], places=0)
            rx_sym_est[i] = (sink.data()[i] / channel_est[i]).real
    return (carr_offset, list(shift_tuple(rx_sym_est, -carr_offset_hat)))
def test_006_channel_and_carroffset (self):
    """ Add a channel, check if it's correctly estimated """
    fft_len = 16
    carr_offset = 2
    # Index          0  1  2  3   4  5   6   7  8  9  10   11   12  13 14 15
    sync_symbol1 = (0, 0, 0, 1, 0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, 0)
    sync_symbol2 = (0, 0, 0, 1j, -1, 1, -1j, 1j, 0, 1, -1j, -1, -1j, 1, 0, 0)
    data_symbol = (0, 0, 0, 1, -1, 1, -1, 1, 0, 1, -1, -1, -1, 1, 0, 0)
    # Channel is range(fft_len); with the carrier shift applied, the
    # estimator should see these taps on the shifted active carriers:
    # Shifted (0, 0, 0, 0, 0, 1j, -1, 1, -1j, 1j, 0, 1, -1j, -1, -1j, 1)
    chanest_exp = (0, 0, 0, 5, 6, 7, 8, 9, 0, 11, 12, 13, 14, 15, 0, 0)
    tx_data = shift_tuple(sync_symbol1, carr_offset) + \
              shift_tuple(sync_symbol2, carr_offset) + \
              shift_tuple(data_symbol, carr_offset)
    channel = range(fft_len)
    src = blocks.vector_source_c(tx_data, False, fft_len)
    chan = blocks.multiply_const_vcc(channel)
    chanest = digital.ofdm_chanest_vcvc(sync_symbol1, sync_symbol2, 1)
    sink = blocks.vector_sink_c(fft_len)
    self.tb.connect(src, chan, chanest, sink)
    self.tb.run()
    tags = sink.tags()
    chan_est = None
    for tag in tags:
        if pmt.symbol_to_string(tag.key) == 'ofdm_sync_carr_offset':
            self.assertEqual(pmt.to_long(tag.value), carr_offset)
        if pmt.symbol_to_string(tag.key) == 'ofdm_sync_chan_taps':
            chan_est = pmt.c32vector_elements(tag.value)
    self.assertEqual(chan_est, chanest_exp)
    # Payload out must equal the shifted data symbol scaled by the channel.
    self.assertEqual(sink.data(), tuple(numpy.multiply(shift_tuple(data_symbol, carr_offset), channel)))
def test_001_t (self):
    """Smoke test for public-key encryption: generate a keypair, encrypt a
    strobed message and print the resulting nonce and ciphertext.

    No assertions are made — this only verifies the chain runs and
    produces output (ciphertext is nondeterministic, so it cannot be
    compared against a fixed value).
    """
    data = [ord('t'),ord('e'),ord('s'),ord('t')]
    msg = pmt.list1(pmt.list2(pmt.string_to_symbol("msg_clear"),pmt.init_u8vector(len(data),data)))
    filename_sk = "secret.key"
    filename_pk = "public.key"
    nacl.generate_keypair(filename_sk,filename_pk)
    # Strobe the same clear message every 100 ms into the encryptor.
    strobe = blocks.message_strobe(msg, 100)
    encrypt_public = nacl.encrypt_public(filename_pk,filename_sk)
    debug = blocks.message_debug()
    self.tb.msg_connect(strobe,"strobe",encrypt_public,"Msg clear")
    self.tb.msg_connect(encrypt_public,"Msg encrypted",debug,"store")
    self.tb.start()
    sleep(0.15)
    self.tb.stop()
    self.tb.wait()
    # check results: message is a list of (nonce, ciphertext) sub-lists
    msg_stored = debug.get_message(0)
    nonce = pmt.nth(0,msg_stored)
    msg_encrypted = pmt.nth(1,msg_stored)
    print pmt.symbol_to_string(pmt.nth(0,nonce)), pmt.u8vector_elements(pmt.nth(1,nonce))
    print pmt.symbol_to_string(pmt.nth(0,msg_encrypted)), pmt.u8vector_elements(pmt.nth(1,msg_encrypted))
def test_003_channel_no_carroffset (self):
    """ Add a channel, check if it's correctly estimated """
    fft_len = 16
    carr_offset = 0
    sync_symbol1 = (0, 0, 0, 1, 0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, 0)
    sync_symbol2 = (0, 0, 0, 1j, -1, 1, -1j, 1j, 0, 1, -1j, -1, -1j, 1, 0, 0)
    data_symbol = (0, 0, 0, 1, -1, 1, -1, 1, 0, 1, -1, -1, -1, 1, 0, 0)
    tx_data = sync_symbol1 + sync_symbol2 + data_symbol
    channel = (0, 0, 0, 2, -2, 2, 3j, 2, 0, 2, 2, 2, 2, 3, 0, 0)
    src = blocks.vector_source_c(tx_data, False, fft_len)
    chan = blocks.multiply_const_vcc(channel)
    chanest = digital.ofdm_chanest_vcvc(sync_symbol1, sync_symbol2, 1)
    sink = blocks.vector_sink_c(fft_len)
    sink_chanest = blocks.vector_sink_c(fft_len)
    self.tb.connect(src, chan, chanest, sink)
    # Output 1 of the estimator carries the channel-taps vector.
    self.tb.connect((chanest, 1), sink_chanest)
    self.tb.run()
    tags = sink.tags()
    # Payload out must equal the data symbol scaled by the channel taps.
    self.assertEqual(shift_tuple(sink.data(), -carr_offset), tuple(numpy.multiply(data_symbol, channel)))
    for tag in tags:
        if pmt.symbol_to_string(tag.key) == 'ofdm_sync_carr_offset':
            self.assertEqual(pmt.to_long(tag.value), carr_offset)
        if pmt.symbol_to_string(tag.key) == 'ofdm_sync_chan_taps':
            self.assertEqual(pmt.c32vector_elements(tag.value), channel)
    self.assertEqual(sink_chanest.data(), channel)
def test_004_channel_no_carroffset_1sym (self):
    """ Add a channel, check if it's correctly estimated.
    Only uses 1 synchronisation symbol. """
    fft_len = 16
    carr_offset = 0
    sync_symbol = (0, 0, 0, 1, 0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, 0)
    data_symbol = (0, 0, 0, 1, -1, 1, -1, 1, 0, 1, -1, -1, -1, 1, 0, 0)
    tx_data = sync_symbol + data_symbol
    channel = (0, 0, 0, 2, 2, 2, 2, 3, 3, 2.5, 2.5, -3, -3, 1j, 1j, 0)
    src = blocks.vector_source_c(tx_data, False, fft_len)
    chan = blocks.multiply_const_vcc(channel)
    # Empty second sync symbol: single-symbol estimation mode.
    chanest = digital.ofdm_chanest_vcvc(sync_symbol, (), 1)
    sink = blocks.vector_sink_c(fft_len)
    sink_chanest = blocks.vector_sink_c(fft_len)
    self.tb.connect(src, chan, chanest, sink)
    self.tb.connect((chanest, 1), sink_chanest)
    self.tb.run()
    self.assertEqual(sink_chanest.data(), channel)
    tags = sink.tags()
    for tag in tags:
        if pmt.symbol_to_string(tag.key) == 'ofdm_sync_carr_offset':
            self.assertEqual(pmt.to_long(tag.value), carr_offset)
        if pmt.symbol_to_string(tag.key) == 'ofdm_sync_chan_taps':
            self.assertEqual(pmt.c32vector_elements(tag.value), channel)
def test_001_t(self):
    """Simulate three CW radar targets, estimate their velocities and
    check that a msg_gate passes only the target inside its bounds.

    Three targets at velocities (5, 15, 20) are simulated; the gate is
    configured for velocity in [8, 17], so only the 15 m/s target should
    survive it.
    """
    # set up fg
    test_len = 1024
    packet_len = test_len
    samp_rate = 2000
    center_freq = 1e9
    velocity = (5, 15, 20)
    src = radar.signal_generator_cw_c(packet_len, samp_rate, (0, 0), 1)
    head = blocks.head(8, test_len)
    sim = radar.static_target_simulator_cc(
        (10, 10, 10), velocity, (1e12, 1e12, 1e12), (0, 0, 0), (0,), samp_rate, center_freq, 1, True, False
    )
    mult = blocks.multiply_cc()
    fft = radar.ts_fft_cc(packet_len)
    cfar = radar.os_cfar_c(samp_rate, 5, 0, 0.78, 10, True)
    est = radar.estimator_cw(center_freq)
    res1 = radar.print_results()
    res2 = radar.print_results()
    # Gate on velocity in [8, 17]; the "bla" key is ignored by design.
    gate = radar.msg_gate(("velocity", "bla"), (8, 8), (17, 17))
    debug1 = blocks.message_debug()
    debug2 = blocks.message_debug()
    self.tb.connect(src, head, (mult, 1))
    self.tb.connect(head, sim, (mult, 0))
    self.tb.connect(mult, fft, cfar)
    self.tb.msg_connect(cfar, "Msg out", est, "Msg in")
    self.tb.msg_connect(est, "Msg out", res1, "Msg in")
    self.tb.msg_connect(est, "Msg out", debug1, "store")
    self.tb.msg_connect(est, "Msg out", gate, "Msg in")
    self.tb.msg_connect(gate, "Msg out", debug2, "store")
    self.tb.msg_connect(gate, "Msg out", res2, "Msg in")
    self.tb.start()
    sleep(0.5)
    self.tb.stop()
    self.tb.wait()
    # check data
    msg1 = debug1.get_message(0) # msg without gate
    msg2 = debug2.get_message(0) # msg with gate
    self.assertEqual(
        "velocity", pmt.symbol_to_string(pmt.nth(0, (pmt.nth(1, msg1))))
    ) # check velocity message part (symbol), 1
    self.assertEqual(
        "velocity", pmt.symbol_to_string(pmt.nth(0, (pmt.nth(1, msg2))))
    ) # check velocity message part (symbol), 2
    self.assertEqual(pmt.length(pmt.nth(1, pmt.nth(1, msg1))), 3) # check number of targets without gate
    self.assertEqual(pmt.length(pmt.nth(1, pmt.nth(1, msg2))), 1) # check number of targets with gate
    self.assertAlmostEqual(
        1, velocity[1] / pmt.f32vector_ref(pmt.nth(1, (pmt.nth(1, msg2))), 0), 1
    ) # check velocity value
def test_002_simpledfe (self):
    """ Use the simple DFE equalizer. """
    fft_len = 8
    # Carrier:    4   5  6  7   0  1  2   3  (-1 marks unused carriers)
    tx_data = [-1, -1, 1, 2, -1, 3, 0, -1, # 0
               -1, -1, 0, 2, -1, 2, 0, -1, # 8
               -1, -1, 3, 0, -1, 1, 0, -1, # 16 (Pilot symbols)
               -1, -1, 1, 1, -1, 0, 2, -1] # 24
    cnst = digital.constellation_qpsk()
    # Map symbol numbers to constellation points; unused carriers -> 0.
    tx_signal = [cnst.map_to_points_v(x)[0] if x != -1 else 0 for x in tx_data]
    occupied_carriers = ((1, 2, 6, 7),)
    pilot_carriers = ((), (), (1, 2, 6, 7), ())
    pilot_symbols = (
        [], [], [cnst.map_to_points_v(x)[0] for x in (1, 0, 3, 0)], []
    )
    equalizer = digital.ofdm_equalizer_simpledfe(
        fft_len, cnst.base(), occupied_carriers, pilot_carriers, pilot_symbols, 0, 0.01
    )
    channel = [
        0, 0, 1, 1, 0, 1, 1, 0,
        0, 0, 1, 1, 0, 1, 1, 0, # These coefficients will be rotated slightly...
        0, 0, 1j, 1j, 0, 1j, 1j, 0, # Go crazy here!
        0, 0, 1j, 1j, 0, 1j, 1j, 0 # ...and again here.
    ]
    # Apply a small random rotation to symbols 2 and 4 so the DFE has
    # something to track (the factor for idx2 is deliberately zero).
    for idx in range(fft_len, 2*fft_len):
        channel[idx] = channel[idx-fft_len] * numpy.exp(1j * .1 * numpy.pi * (numpy.random.rand()-.5))
        idx2 = idx+2*fft_len
        channel[idx2] = channel[idx2] * numpy.exp(1j * 0 * numpy.pi * (numpy.random.rand()-.5))
    len_tag_key = "frame_len"
    len_tag = gr.tag_t()
    len_tag.offset = 0
    len_tag.key = pmt.string_to_symbol(len_tag_key)
    len_tag.value = pmt.from_long(4)
    chan_tag = gr.tag_t()
    chan_tag.offset = 0
    chan_tag.key = pmt.string_to_symbol("ofdm_sync_chan_taps")
    chan_tag.value = pmt.init_c32vector(fft_len, channel[:fft_len])
    src = blocks.vector_source_c(numpy.multiply(tx_signal, channel), False, fft_len, (len_tag, chan_tag))
    eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), 0, len_tag_key, True)
    sink = blocks.vector_sink_c(fft_len)
    self.tb.connect(src, eq, sink)
    self.tb.run ()
    # Decode the equalized symbols back to symbol numbers.
    rx_data = [cnst.decision_maker_v((x,)) if x != 0 else -1 for x in sink.data()]
    self.assertEqual(tx_data, rx_data)
    for tag in sink.tags():
        if pmt.symbol_to_string(tag.key) == len_tag_key:
            self.assertEqual(pmt.to_long(tag.value), 4)
        if pmt.symbol_to_string(tag.key) == "ofdm_sync_chan_taps":
            # Equalizer re-emits its final channel estimate; it should be
            # close to the last (rotated) set of taps.
            self.assertComplexTuplesAlmostEqual(list(pmt.c32vector_elements(tag.value)), channel[-fft_len:], places=1)
def pre_hook(val): key_pmt = es.event_field( val.msg, key_sym ); key = pmt.symbol_to_string( key_pmt ); print key; if(key == "*"): key = "10"; elif(key == "#"): key = "12"; elif(key=="0"): ival = 11; ival = int(key); row = (ival - 1)/3; col = (ival - 1)%3; rowfreqs = [697.0, 770.0, 852.0, 941.0]; colfreqs = [1209.0, 1336.0, 1477.0, 1633.0]; assert(row < len(rowfreqs)); assert(col < len(colfreqs)); blocks = val.handler.pb2(); blocks["src_L"].set_frequency(rowfreqs[row]); blocks["src_R"].set_frequency(colfreqs[col]); print "set freq %s"%( str((rowfreqs[row], colfreqs[col])) ); r = es.es_hook_rval(); return r;
def test_004_8bits_formatter_ofdm (self): occupied_carriers = ((1, 2, 3, 5, 6, 7),) # 3 PDUs: | | | | data = (1, 2, 3, 4, 1, 2, 1, 2, 3, 4) tagname = "packet_len" tag1 = gr.tag_t() tag1.offset = 0 tag1.key = pmt.string_to_symbol(tagname) tag1.value = pmt.from_long(4) tag2 = gr.tag_t() tag2.offset = 4 tag2.key = pmt.string_to_symbol(tagname) tag2.value = pmt.from_long(2) tag3 = gr.tag_t() tag3.offset = 6 tag3.key = pmt.string_to_symbol(tagname) tag3.value = pmt.from_long(4) src = blocks.vector_source_b(data, False, 1, (tag1, tag2, tag3)) formatter_object = digital.packet_header_ofdm(occupied_carriers, 1, tagname) self.assertEqual(formatter_object.header_len(), 6) self.assertEqual(pmt.symbol_to_string(formatter_object.len_tag_key()), tagname) header = digital.packet_headergenerator_bb(formatter_object.formatter(), tagname) sink = blocks.vector_sink_b() self.tb.connect(src, header, sink) self.tb.run() expected_data = ( 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 ) self.assertEqual(sink.data(), expected_data)
def count_bursts(data, tags, tsb_tag_key, vlen=1):
    """Count contiguous bursts of packets in a sample stream.

    A burst is one or more back-to-back packets (no idle samples between
    them). Packet boundaries are given by tags whose key matches
    *tsb_tag_key*; each tag's value is the packet length in vectors,
    scaled by *vlen* to get the length in items.

    Raises ValueError if two length tags share the same offset, and a
    generic Exception if a length tag arrives inside a running packet.
    """
    # Map tag offset -> packet length in items, rejecting duplicates.
    offset_to_len = {}
    for t in tags:
        if pmt.symbol_to_string(t.key) != tsb_tag_key:
            continue
        if t.offset in offset_to_len:
            raise ValueError(
                "More than one tags with key {0} with the same offset={1}."
                .format(tsb_tag_key, t.offset))
        offset_to_len[t.offset] = pmt.to_long(t.value)*vlen
    burst_open = False
    packet_open = False
    cur_len = None
    pos_in_packet = None
    n_bursts = 0
    for idx in range(len(data)):
        if idx in offset_to_len:
            if packet_open:
                print("Got tag at pos {0} current packet_pos is {1}".format(idx, pos_in_packet))
                raise Exception("Received packet tag while in packet.")
            # Start a new packet; -1 so the increment below lands on 0.
            pos_in_packet = -1
            cur_len = offset_to_len[idx]
            packet_open = True
            if not burst_open:
                n_bursts += 1
            burst_open = True
        elif not packet_open:
            # Idle sample outside any packet ends the current burst.
            burst_open = False
        if packet_open:
            pos_in_packet += 1
            if pos_in_packet == cur_len-1:
                packet_open = False
                pos_in_packet = None
    return n_bursts
def vectors_to_packets(data, tags, tsb_tag_key, vlen=1):
    """Split a flat sample sequence into per-packet lists.

    Packet boundaries come from tags whose key matches *tsb_tag_key*;
    each tag's value is the packet length in vectors, multiplied by
    *vlen* to get the length in items. Packets must tile *data* exactly,
    starting at offset 0.

    Raises ValueError on duplicate tag offsets, a missing tag at an
    expected boundary, zero-length packets, or a truncated final packet.
    """
    # Build offset -> length-in-items map, rejecting duplicate offsets.
    offset_to_len = {}
    for t in tags:
        if pmt.symbol_to_string(t.key) != tsb_tag_key:
            continue
        if t.offset in offset_to_len:
            raise ValueError(
                "More than one tags with key {0} with the same offset={1}."
                .format(tsb_tag_key, t.offset))
        offset_to_len[t.offset] = pmt.to_long(t.value)*vlen
    if 0 not in offset_to_len:
        raise ValueError("There is no tag with key {0} and an offset of 0"
                         .format(tsb_tag_key))
    packets = []
    cursor = 0
    total = len(data)
    while cursor < total:
        if cursor not in offset_to_len:
            raise ValueError("There is no tag with key {0} and an offset of {1}."
                             "We were expecting one."
                             .format(tsb_tag_key, cursor))
        step = offset_to_len[cursor]
        if step == 0:
            raise ValueError("Packets cannot have zero length.")
        if cursor+step > total:
            raise ValueError("The final packet is incomplete.")
        packets.append(data[cursor: cursor+step])
        cursor += step
    return packets
def test_0010_tag_propagation (self): """ Make sure tags on the CRC aren't lost. """ # Data with precalculated CRC data = ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1 ) # 2, 67, 225, 188 testtag = gr.tag_t() testtag.offset = len(data)-1 testtag.key = pmt.string_to_symbol('tag1') testtag.value = pmt.from_long(0) src = blocks.vector_source_b(data, False, 1, (testtag,)) crc_check = digital.crc32_bb(True, self.tsb_key, False) sink = blocks.tsb_vector_sink_b(tsb_key=self.tsb_key) self.tb.connect( src, blocks.stream_to_tagged_stream(gr.sizeof_char, 1, len(data), self.tsb_key), crc_check, sink ) self.tb.run() self.assertEqual([len(data)-33,], [tag.offset for tag in sink.tags() if pmt.symbol_to_string(tag.key) == 'tag1'])
def handle_msg(self, msg):
    """Forward a (long, symbol) PMT tuple message to the GUI panel.

    Non-tuple messages are silently ignored.
    """
    if not pmt.is_tuple(msg):
        return
    msg_id = pmt.to_long(pmt.tuple_ref(msg, 0))
    text = pmt.symbol_to_string(pmt.tuple_ref(msg, 1))
    event = DataEvent([msg_id, text])
    wx.PostEvent(self.panel, event)
    del event
def test_005_packet_len_tag (self):
    """ Standard test """
    fft_len = 16
    tx_symbols = range(1, 16);
    # Actual input: symbols placed on alternating occupied-carrier sets.
    tx_symbols = (0, 1, 1j, 2, 3, 0, 0, 0, 0, 0, 0, 4, 5, 2j, 6, 0,
                  0, 7, 8, 3j, 9, 0, 0, 0, 0, 0, 0, 10, 4j, 11, 12, 0,
                  0, 13, 1j, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 0, 0)
    expected_result = tuple(range(1, 16))
    occupied_carriers = ((1, 3, 4, 11, 12, 14), (1, 2, 4, 11, 13, 14),)
    n_syms = len(tx_symbols)/fft_len
    tag_name = "len"
    tag = gr.tag_t()
    tag.offset = 0
    tag.key = pmt.string_to_symbol(tag_name)
    tag.value = pmt.from_long(n_syms)
    # Separate packet-length tag: number of complex samples in the packet.
    tag2 = gr.tag_t()
    tag2.offset = 0
    tag2.key = pmt.string_to_symbol("packet_len")
    tag2.value = pmt.from_long(len(expected_result))
    src = blocks.vector_source_c(tx_symbols, False, fft_len, (tag, tag2))
    serializer = digital.ofdm_serializer_vcc(fft_len, occupied_carriers, tag_name, "packet_len", 0, "", False)
    sink = blocks.vector_sink_c()
    self.tb.connect(src, serializer, sink)
    self.tb.run ()
    self.assertEqual(sink.data(), expected_result)
    # Only the packet_len tag should survive, rewritten to the true length.
    self.assertEqual(len(sink.tags()), 1)
    result_tag = sink.tags()[0]
    self.assertEqual(pmt.symbol_to_string(result_tag.key), "packet_len")
    self.assertEqual(pmt.to_long(result_tag.value), len(expected_result))
def test_001_t(self):
    """Round-trip test for secret-key crypto: encrypt then decrypt a
    strobed message and check the plaintext survives unchanged."""
    data = [ord("a"), ord("b"), ord("c"), ord("d")]
    msg = pmt.list1(pmt.list2(pmt.string_to_symbol("msg_clear"), pmt.init_u8vector(len(data), data)))
    filename_key = "secret.a"
    nacl.generate_key(filename_key)
    # Strobe the clear message every 100 ms through encrypt -> decrypt.
    strobe = blocks.message_strobe(msg, 100)
    encrypt_secret = nacl.encrypt_secret(filename_key)
    decrypt_secret = nacl.decrypt_secret(filename_key)
    debug = blocks.message_debug()
    self.tb.msg_connect(strobe, "strobe", encrypt_secret, "Msg clear")
    self.tb.msg_connect(encrypt_secret, "Msg encrypted", decrypt_secret, "Msg encrypted")
    self.tb.msg_connect(decrypt_secret, "Msg decrypted", debug, "store")
    self.tb.start()
    sleep(0.15)
    self.tb.stop()
    self.tb.wait()
    # check results: decrypted payload must match the original bytes
    msg_out = debug.get_message(0)
    msg_symbol = pmt.symbol_to_string(pmt.nth(0, pmt.nth(0, msg_out)))
    msg_decrypted = pmt.u8vector_elements(pmt.nth(1, pmt.nth(0, msg_out)))
    print msg_symbol, msg_decrypted
    print "msg_clear", data
    for k in range(len(data)):
        self.assertEqual(data[k], msg_decrypted[k])
def test_001b_shifted (self):
    """ Same as before, but shifted, because that's the normal mode in OFDM Rx """
    fft_len = 16
    tx_symbols = (
        0, 0, 0, 0, 0, 0, 1, 2, 0, 3, 4, 5, 0, 0, 0, 0,
        0, 0, 0, 0, 6, 1j, 7, 8, 0, 9, 10, 1j, 11, 0, 0, 0,
        0, 0, 0, 0, 0, 12, 13, 14, 0, 15, 16, 17, 0, 0, 0, 0,
    )
    expected_result = tuple(range(18))
    # Second set uses negative (i.e. shifted/wrapped) carrier indices.
    occupied_carriers = ((13, 14, 15, 1, 2, 3), (-4, -2, -1, 1, 2, 4),)
    n_syms = len(tx_symbols)/fft_len
    tag_name = "len"
    tag = gr.tag_t()
    tag.offset = 0
    tag.key = pmt.string_to_symbol(tag_name)
    tag.value = pmt.from_long(n_syms)
    src = blocks.vector_source_c(tx_symbols, False, fft_len, (tag,))
    serializer = digital.ofdm_serializer_vcc(fft_len, occupied_carriers, tag_name)
    sink = blocks.vector_sink_c()
    self.tb.connect(src, serializer, sink)
    self.tb.run ()
    self.assertEqual(sink.data(), expected_result)
    self.assertEqual(len(sink.tags()), 1)
    result_tag = sink.tags()[0]
    self.assertEqual(pmt.symbol_to_string(result_tag.key), tag_name)
    # Length tag is rewritten to the number of serialized symbols.
    self.assertEqual(pmt.to_long(result_tag.value), n_syms * len(occupied_carriers[0]))
def test_001(self):
    """Check correlate_and_sync's time_est tag against a reference timing
    estimate computed from the correlator output's center of mass."""
    # We're using a really simple preamble so that the correlation
    # is straight forward.
    preamble = [0, 0, 0, 1, 0, 0, 0]
    # Our pulse shape has this width (in units of symbols).
    pulse_width = 1.5
    # The number of filters to use for resampling.
    n_filters = 12
    sps = 3
    data = [0]*10 + preamble + [0]*40
    src = blocks.vector_source_c(data)
    # We want to generate taps with a sampling rate of sps=n_filters for resampling
    # purposes.
    pulse_shape = make_parabolic_pulse_shape(sps=n_filters, N=0.5, scale=35)
    # Create our resampling filter to generate the data for the correlator.
    shape = filter.pfb_arb_resampler_ccf(sps, pulse_shape, n_filters)
    # Generate the correlator block itself.
    correlator = digital.correlate_and_sync_cc(preamble, pulse_shape, sps, n_filters)
    # Connect it all up and go.
    snk = blocks.vector_sink_c()
    null = blocks.null_sink(gr.sizeof_gr_complex)
    tb = gr.top_block()
    tb.connect(src, shape, correlator, snk)
    tb.connect((correlator, 1), null)
    tb.run()
    # Look at the tags. Retrieve the timing offset.
    data = snk.data()
    offset = None
    timing_error = None
    for tag in snk.tags():
        key = pmt.symbol_to_string(tag.key)
        if key == "time_est":
            offset = tag.offset
            timing_error = pmt.to_double(tag.value)
    if offset is None:
        raise ValueError("No tags found.")
    # Detect where the middle of the preamble is.
    # Assume we have only one peak and that it is symmetric.
    sum_id = 0
    sum_d = 0
    for i, d in enumerate(data):
        sum_id += i*abs(d)
        sum_d += abs(d)
    data_i = sum_id/sum_d
    if offset is not None:
        diff = data_i-offset
        # Fold the difference into (-sps/2, sps/2] to compare against the
        # sub-sample timing error reported by the tag.
        remainder = -(diff%sps)
        if remainder < -sps/2.0:
            remainder += sps
        tol = 0.2
        difference = timing_error - remainder
        difference = difference % sps
        if abs(difference) >= tol:
            print("Tag gives timing estimate of {0}. QA calculates it as {1}. Tolerance is {2}".format(timing_error, remainder, tol))
        self.assertTrue(abs(difference) < tol)
def pre_hook(val):
    """Extract the string payload from an event and load it into the
    flowgraph's source block before the event fires."""
    payload = pmt.symbol_to_string(es.event_field(val.msg, key_sym))
    raw = numpy.fromstring(payload, dtype=numpy.byte)
    pb = val.handler.pb2()
    pb["src"].set_data(es.StrVector([payload]), len(raw))
    return es.es_hook_rval()
def handle_msg(self, msg):
    """Update the station ID from an RDS (type, text) tuple message.

    Only type 0 (program information) is handled; all other messages
    and non-tuple PMTs are ignored.
    """
    if not pmt.is_tuple(msg):
        return
    msg_type = pmt.to_long(pmt.tuple_ref(msg, 0))
    text = pmt.symbol_to_string(pmt.tuple_ref(msg, 1))
    if msg_type == 0:
        # program information; decode defensively, replacing bad bytes
        self.stationID = unicode(text, errors='replace')
def handle_msg_fft_average(self, msg_pmt):
    """Handle an FFT-average message: buffer f32 vector payloads, or
    raise the averaging flag on the FFT_AVG_FINISH_ACK control symbol."""
    # PMT TYPE CHECK AND UNPACKING (a PMT is either a vector or a symbol,
    # never both, so branch order does not matter)
    if pmt.is_f32vector(msg_pmt):
        self.fft_data_buffer = pmt.f32vector_elements(msg_pmt)
    elif pmt.is_symbol(msg_pmt):
        if pmt.symbol_to_string(msg_pmt) == "FFT_AVG_FINISH_ACK":
            self.top_block.rise_fft_avg_flag()
def handle_msg_energy(self, msg_pmt):
    """Handle an energy message: forward f32 vector payloads to the top
    block, or raise the energy flag on the E_FINISH_ACK control symbol."""
    # PMT TYPE CHECK AND UNPACKING (vector and symbol are mutually
    # exclusive PMT types, so branch order does not matter)
    if pmt.is_f32vector(msg_pmt):
        self.top_block.on_receive_pkt_data(pmt.f32vector_elements(msg_pmt))
    elif pmt.is_symbol(msg_pmt):
        if pmt.symbol_to_string(msg_pmt) == "E_FINISH_ACK":
            self.top_block.rise_energy_flag()
def test_001_offset_2sym (self):
    """ Add a frequency offset, check if it's correctly detected.
    Also add some random tags and see if they come out at the correct
    position. """
    fft_len = 16
    carr_offset = -2
    sync_symbol1 = (0, 0, 0, 1, 0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, 0)
    sync_symbol2 = (0, 0, 0, 1, -1, 1, -1, 1, 0, 1, -1, -1, -1, 1, 0, 0)
    data_symbol = (0, 0, 0, 1, -1, 1, -1, 1, 0, 1, -1, -1, -1, 1, 0, 0)
    tx_data = shift_tuple(sync_symbol1, carr_offset) + \
              shift_tuple(sync_symbol2, carr_offset) + \
              shift_tuple(data_symbol, carr_offset)
    tag1 = gr.tag_t()
    tag1.offset = 0
    tag1.key = pmt.string_to_symbol("test_tag_1")
    tag1.value = pmt.from_long(23)
    tag2 = gr.tag_t()
    tag2.offset = 2
    tag2.key = pmt.string_to_symbol("test_tag_2")
    tag2.value = pmt.from_long(42)
    src = blocks.vector_source_c(tx_data, False, fft_len, (tag1, tag2))
    chanest = digital.ofdm_chanest_vcvc(sync_symbol1, sync_symbol2, 1)
    sink = blocks.vector_sink_c(fft_len)
    self.tb.connect(src, chanest, sink)
    self.tb.run()
    self.assertEqual(shift_tuple(sink.data(), -carr_offset), data_symbol)
    tags = sink.tags()
    detected_tags = {
        'ofdm_sync_carr_offset': False,
        'test_tag_1': False,
        'test_tag_2': False
    }
    for tag in tags:
        if pmt.symbol_to_string(tag.key) == 'ofdm_sync_carr_offset':
            carr_offset_hat = pmt.to_long(tag.value)
            self.assertEqual(pmt.to_long(tag.value), carr_offset)
        if pmt.symbol_to_string(tag.key) == 'test_tag_1':
            self.assertEqual(tag.offset, 0)
        if pmt.symbol_to_string(tag.key) == 'test_tag_2':
            # Both sync symbols are consumed, so tags from the sync region
            # land on the first output symbol (offset 0).
            self.assertEqual(tag.offset, 0)
        detected_tags[pmt.symbol_to_string(tag.key)] = True
    self.assertTrue(all(detected_tags.values()))
def handle_ctrl(self, msg): ctrl_string = pmt.symbol_to_string(msg) if ctrl_string == "PTT": self.ser.setRTS(True) print "PTT RTS ON" elif ctrl_string == "!PTT": self.ser.setRTS(False) print "PTT RTS OFF" else: print "Invalid ctrl command"
def handler(self, rds_data):
    """Dispatch a decoded RDS (type, text) tuple to the matching GUI widget.

    Types handled: 0 = program info (callsign), 1 = station name,
    3 = flags, 4 = radio text. Other types are ignored.
    """
    kind = pmt.to_long(pmt.tuple_ref(rds_data, 0))
    text = pmt.symbol_to_string(pmt.tuple_ref(rds_data, 1))
    if kind == 0:
        self.program_info.setText(callsign(text))
    elif kind == 1:
        self.station_name.setText(text)
    elif kind == 3:
        self.flags.update(text)
    elif kind == 4:
        self.radio_text.setText(text.strip())
def test_003_t (self):
    """
    more advanced:
    - 6 symbols per carrier
    - 2 pilots per carrier
    - have enough data for nearly 3 OFDM symbols
    - send that twice
    - add some random tags
    - don't shift
    """
    tx_symbols = list(range(1, 16)); # 15 symbols
    pilot_symbols = ((1j, 2j), (3j, 4j))
    occupied_carriers = ((1, 3, 4, 11, 12, 14), (1, 2, 4, 11, 13, 14),)
    pilot_carriers = ((2, 13), (3, 12))
    expected_result = (0, 1, 1j, 2, 3, 0, 0, 0, 0, 0, 0, 4, 5, 2j, 6, 0,
                       0, 7, 8, 3j, 9, 0, 0, 0, 0, 0, 0, 10, 4j, 11, 12, 0,
                       0, 13, 1j, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 0, 0)
    fft_len = 16
    testtag1 = gr.tag_t()
    testtag1.offset = 0
    testtag1.key = pmt.string_to_symbol('tag1')
    testtag1.value = pmt.from_long(0)
    testtag2 = gr.tag_t()
    testtag2.offset = 7 # On the 2nd OFDM symbol
    testtag2.key = pmt.string_to_symbol('tag2')
    testtag2.value = pmt.from_long(0)
    testtag3 = gr.tag_t()
    testtag3.offset = len(tx_symbols)+1 # First OFDM symbol of packet 2
    testtag3.key = pmt.string_to_symbol('tag3')
    testtag3.value = pmt.from_long(0)
    testtag4 = gr.tag_t()
    testtag4.offset = 2*len(tx_symbols)-1 # Last OFDM symbol of packet 2
    testtag4.key = pmt.string_to_symbol('tag4')
    testtag4.value = pmt.from_long(0)
    src = blocks.vector_source_c(tx_symbols * 2, False, 1, (testtag1, testtag2, testtag3, testtag4))
    alloc = digital.ofdm_carrier_allocator_cvc(fft_len, occupied_carriers, pilot_carriers, pilot_symbols, (), self.tsb_key, False)
    sink = blocks.tsb_vector_sink_c(fft_len)
    self.tb.connect(src, blocks.stream_to_tagged_stream(gr.sizeof_gr_complex, 1, len(tx_symbols), self.tsb_key), alloc, sink)
    self.tb.run ()
    self.assertEqual(sink.data()[0], expected_result)
    # Tags must be re-mapped from scalar-sample offsets to OFDM-symbol
    # (vector) offsets at the allocator output.
    tags_found = {'tag1': False, 'tag2': False, 'tag3': False, 'tag4': False}
    correct_offsets = {'tag1': 0, 'tag2': 1, 'tag3': 3, 'tag4': 5}
    for tag in sink.tags():
        key = pmt.symbol_to_string(tag.key)
        if key in list(tags_found.keys()):
            tags_found[key] = True
            self.assertEqual(correct_offsets[key], tag.offset)
    self.assertTrue(all(tags_found.values()))
def test_001_t (self):
    """Simulate a single CW radar target and check that the estimator
    reports rx_time (0, 0.0) and a velocity close to the simulated one."""
    # set up fg
    test_len = 1024
    packet_len = test_len
    samp_rate = 2000
    center_freq = 1e9
    velocity = 15
    src = radar.signal_generator_cw_c(packet_len,samp_rate,(0,0),1)
    head = blocks.head(8,test_len)
    sim = radar.static_target_simulator_cc((10,10),(velocity,velocity),(1e9,1e9),(0,0),(0,),samp_rate,center_freq,1,True,False)
    mult = blocks.multiply_cc()
    fft = radar.ts_fft_cc(packet_len)
    cfar = radar.os_cfar_c(samp_rate, 5, 0, 0.78, 10, True)
    est = radar.estimator_cw(center_freq)
    res = radar.print_results()
    debug = blocks.message_debug()
    self.tb.connect(src,head,(mult,1))
    self.tb.connect(head,sim,(mult,0))
    self.tb.connect(mult,fft,cfar)
    self.tb.msg_connect(cfar,'Msg out',est,'Msg in')
    self.tb.msg_connect(est,'Msg out',res,'Msg in')
    self.tb.msg_connect(est,'Msg out',debug,'store')
    self.tb.start()
    sleep(0.5)
    self.tb.stop()
    self.tb.wait()
    # check data
    msg = debug.get_message(0)
    self.assertEqual( "rx_time", pmt.symbol_to_string(pmt.nth(0,(pmt.nth(0,msg)))) ) # check rx_time message part (symbol)
    self.assertEqual( 0, pmt.to_uint64(pmt.tuple_ref(pmt.nth(1,(pmt.nth(0,msg))),0)) ) # check rx_time value (integer seconds)
    self.assertEqual( 0.0, pmt.to_double(pmt.tuple_ref(pmt.nth(1,(pmt.nth(0,msg))),1)) ) # fractional seconds
    self.assertEqual( "velocity", pmt.symbol_to_string(pmt.nth(0,(pmt.nth(1,msg)))) ) # check velocity message part (symbol)
    # Ratio of true to estimated velocity should be ~1 (2 decimal places).
    self.assertAlmostEqual( 1, velocity/pmt.f32vector_ref(pmt.nth(1,(pmt.nth(1,msg))),0), 2 ) # check velocity value
def test_000(self):
    """Verify message port discovery and end-to-end message passing
    between a message generator and a message consumer."""
    num_msgs = 10
    msg_interval = 1000
    msg_list = []
    for i in range(num_msgs):
        msg_list.append(pmt.from_long(i))
    # Create vector source with dummy data to trigger messages
    src_data = []
    for i in range(num_msgs*msg_interval):
        src_data.append(float(i))
    src = blocks.vector_source_f(src_data, False)
    msg_gen = message_generator(msg_list, msg_interval)
    msg_cons = message_consumer()
    # Connect vector source to message gen
    self.tb.connect(src, msg_gen)
    # Connect message generator to message consumer
    self.tb.msg_connect(msg_gen, 'out_port', msg_cons, 'in_port')
    # Verify that the message port query functions work
    self.assertEqual(pmt.symbol_to_string(pmt.vector_ref(
        msg_gen.message_ports_out(), 0)), 'out_port')
    self.assertEqual(pmt.symbol_to_string(pmt.vector_ref(
        msg_cons.message_ports_in(), 0)), 'in_port')
    # Run to verify message passing
    self.tb.start()
    # Wait for all messages to be sent
    while msg_gen.msg_ctr < num_msgs:
        time.sleep(0.5)
    self.tb.stop()
    self.tb.wait()
    # Verify that the message consumer got all the messages
    self.assertEqual(num_msgs, len(msg_cons.msg_list))
    for i in range(num_msgs):
        self.assertTrue(pmt.equal(msg_list[i], msg_cons.msg_list[i]))
def process_measurement(self,msg):
    """Process a ("freq_offset", offset, state) tuple and publish clock
    corrections (in ppm) on the "ppm" message port.

    Behaviour by state:
      - fcch_search: publish the raw ppm immediately and (re)arm a
        0.5 s reset timer.
      - synchronized: exponentially smooth the ppm estimate (weight
        self.alfa) and publish only every 6th measurement, and only if
        the estimate moved by more than 0.1 ppm.
      - sync_loss: reset and publish 0.0.
    """
    if pmt.is_tuple(msg):
        key = pmt.symbol_to_string(pmt.tuple_ref(msg,0))
        if key == "freq_offset":
            freq_offset = pmt.to_double(pmt.tuple_ref(msg,1))
            # Convert absolute frequency error to parts-per-million of fc;
            # sign flipped so the published value is a correction.
            ppm = -freq_offset/self.fc*1.0e6
            state = pmt.symbol_to_string(pmt.tuple_ref(msg,2))
            self.last_state = state
            if abs(ppm) > 100: #safeguard against flawed measurements
                ppm = 0
                self.reset()
            if state == "fcch_search":
                msg_ppm = pmt.from_double(ppm)
                self.message_port_pub(pmt.intern("ppm"), msg_ppm)
                self.timer.cancel()
                self.timer = Timer(0.5, self.timed_reset)
                self.timer.start()
            elif state == "synchronized":
                self.timer.cancel()
                if self.first_measurement:
                    self.ppm_estimate = ppm
                    self.first_measurement = False
                else:
                    # Exponential moving average with weight self.alfa.
                    self.ppm_estimate = (1-self.alfa)*self.ppm_estimate+self.alfa*ppm
                if self.counter == 5:
                    self.counter = 0
                    if abs(self.last_ppm_estimate-self.ppm_estimate) > 0.1:
                        msg_ppm = pmt.from_double(ppm)
                        self.message_port_pub(pmt.intern("ppm"), msg_ppm)
                        self.last_ppm_estimate = self.ppm_estimate
                else:
                    self.counter=self.counter+1
            elif state == "sync_loss":
                self.reset()
                msg_ppm = pmt.from_double(0.0)
                self.message_port_pub(pmt.intern("ppm"), msg_ppm)
def test_001_t(self):
    """Feed tagged bursts separated by zero-gaps into extract_burst_cc and
    check the extracted samples, tag keys/values/srcid and offsets."""
    n_frames = 500
    burst_len = 383
    gap_len = 53
    tag_key = 'energy_start'
    # Leading ramp is untagged padding; only tagged bursts are extracted.
    data = np.arange(burst_len)
    ref = np.array([], dtype=np.complex)
    tags = []
    for i in range(n_frames):
        frame = np.ones(burst_len) * (i + 1)
        ref = np.concatenate((ref, frame))
        tag = gr.tag_t()
        tag.key = pmt.string_to_symbol(tag_key)
        # First burst starts after the burst_len padding samples.
        tag.offset = burst_len + i * (burst_len + gap_len)
        tag.srcid = pmt.string_to_symbol('qa')
        tag.value = pmt.PMT_T
        tags.append(tag)
        data = np.concatenate((data, frame, np.zeros(gap_len)))
    src = blocks.vector_source_c(data, False, 1, tags)
    burster = gfdm.extract_burst_cc(burst_len, tag_key)
    snk = blocks.vector_sink_c()
    self.tb.connect(src, burster, snk)
    self.tb.run()
    res = np.array(snk.data())
    rx_tags = snk.tags()
    for i, t in enumerate(rx_tags):
        # Output tags carry the burst length as value and the block's
        # name as srcid; bursts are packed back-to-back at the output.
        assert pmt.symbol_to_string(t.key) == tag_key
        assert pmt.to_long(t.value) == burst_len
        assert pmt.symbol_to_string(t.srcid) == burster.name()
        assert t.offset == i * burst_len
    # check data
    self.assertComplexTuplesAlmostEqual(ref, res)
def handle_msg(self, msg):
    """Accumulate incoming characters into self.buf; each newline flushes
    the buffered line to self.process() and bumps self.count.

    The newline itself is not included in the processed line.
    """
    for ch in pmt.symbol_to_string(msg):
        if ch == '\n':
            self.process(self.buf)
            self.buf = ''
            self.count += 1
        else:
            self.buf = self.buf + ch
def mac_handler_method(self, msg):
    """Store the MAC address received as a PMT symbol message."""
    self.mac = pmt.symbol_to_string(msg)
def handler(self, msg):
    """Archive a recorded file described by an incoming metadata message.

    The message may be a plain PMT dictionary or a PDU (pair of metadata
    dict and data); the metadata is located first, then the filename,
    time, frequency and sample-rate fields are used to build the archive
    file name from self.fname_format (or the file is copied verbatim if
    no format is configured). Errors are logged, never raised.
    """
    if not self.archive:
        # no need to waste any time
        return
    if not pmt.is_dict(msg):
        return
    try:
        # this will fail if message is a PDU with non-PMT_NIL arguments
        n = pmt.length(pmt.dict_items(msg))
        # a PDU with one element equal to PMT_NIL still looks like a
        # dictionary...grrrrr!
        if (n == 1) and (pmt.equal(pmt.car(msg),pmt.PMT_NIL) or \
                         pmt.equal(pmt.cdr(msg),pmt.PMT_NIL)):
            # treat as a pdu
            meta = pmt.car(msg)
        else:
            # it's a dictionary
            meta = msg
    except:
        try:
            # message is a pdu
            pmt.length(pmt.dict_items(pmt.car(msg)))
            meta = pmt.car(msg)
        except:
            # neither a dict nor a PDU — nothing we can archive
            return
    # extract file components
    try:
        fname = pmt.dict_ref(meta, self.filename_tag, pmt.PMT_NIL)
        file_time = pmt.dict_ref(meta, self.time_tag, pmt.PMT_NIL)
        freq = pmt.dict_ref(meta, self.freq_tag, pmt.PMT_NIL)
        rate = pmt.dict_ref(meta, self.rate_tag, pmt.PMT_NIL)
        if pmt.equal(fname, pmt.PMT_NIL):
            self.log.warn("No file specified")
            return
        f = pmt.symbol_to_string(fname)
        if self.fname_format == "":
            # copy immediately, keeping the original base name
            self.copy_file( f, os.path.join(self.output_path, os.path.basename(f)))
        else:
            base_fname = copy.deepcopy(self.fname_format)
            # add frequency information to file name
            if not pmt.equal(freq, pmt.PMT_NIL):
                freq = pmt.to_double(freq)
                for freq_spec in self.freq_specs:
                    base_fname = base_fname.replace(
                        freq_spec[0], '%0.0f' % int(freq / freq_spec[1]))
            # add sample-rate information to file name
            if not pmt.equal(rate, pmt.PMT_NIL):
                rate = pmt.to_double(rate)
                for rate_spec in self.rate_specs:
                    base_fname = base_fname.replace(
                        rate_spec[0], '%0.0f' % int(rate / rate_spec[1]))
            # time update: expand strftime() patterns from the (sec, frac)
            # PMT time tuple
            if not pmt.equal(file_time, pmt.PMT_NIL):
                t = pmt.to_uint64(pmt.tuple_ref(file_time,0)) + \
                    pmt.to_double(pmt.tuple_ref(file_time,1))
                base_fname = datetime.datetime.utcfromtimestamp(
                    t).strftime(base_fname)
            # archive file
            self.copy_file(f, os.path.join(self.output_path, base_fname))
    except Exception as e:
        self.log.error("Unable to process message:{}".format(e))
def ip_handler_method(self, msg):
    """Store the IP address string carried by the PMT symbol *msg*."""
    ip_string = pmt.symbol_to_string(msg)
    self.ip = ip_string
def run_sims(top_block_cls=eve_sim, options=None):
    """Run modulation-classification simulations over a range of SNRs.

    For each SNR in [-10, 10) dB (0.5 dB steps) and each of the four
    modulations, a simulation top block is run and the classifier
    decisions are tallied into a confusion matrix, which is written (as
    counts and as percentages) to CSV output files.

    Fix: the original Python-2 ``print`` statements are syntax errors
    under Python 3 (which the rest of this file targets, e.g. f-strings);
    they are converted to ``print()`` calls. No other behavior changed.
    """
    # dictionary of modulations and indexes
    mod = {'BPSK': 0, 'QPSK': 1, '8PSK': 2, '16QAM': 3}

    # parameters when running a sim, could be organized into a class for a sim
    snr_db_ae = 10
    signal_len = 1024
    samp_rate = 100000
    samples_to_check = 100
    samples = 20000

    all_snr_results = []
    all_snr_results_percent = []

    # sims is a list of topblocks
    sims = []
    curr_message = ''

    output_file = open("confusion_matrix.csv", 'w')
    output_file_forplot = open("confusion_matrix_forplot.csv", 'w')

    snr_db_ae_range = np.arange(-10, 10, 0.5)
    for snr_db_ae in snr_db_ae_range:
        # 2d lists to hold sim results (copies for results as nominal
        # counts and percents); column 4 holds the row total
        sim_results = [[0 for col in range(5)] for row in range(4)]
        sim_results_percent = [[0 for col in range(5)] for row in range(4)]
        sims = []
        # iterate through the 4 possible modulations
        for mod_index in range(4):
            sims.append(top_block_cls(snr_db_ae, signal_len, samp_rate,
                                      samples, mod_index))
            sims[mod_index].start()
            sims[mod_index].wait()
            sims[mod_index].stop()
            # tally received symbols and how they were classified
            for curr_samp in range(samples):
                curr_message = pmt.symbol_to_string(pmt.cdr(
                    sims[mod_index].blocks_message_debug_0.get_message(curr_samp)))
                sim_results[mod_index][mod[curr_message]] += 1
                sim_results[mod_index][4] += 1

        # print output of the sim
        print('SNR:', snr_db_ae)
        print('BPSK:', sim_results[mod['BPSK']])
        print('QPSK:', sim_results[mod['QPSK']])
        print('8PSK:', sim_results[mod['8PSK']])
        print('16QAM:', sim_results[mod['16QAM']])

        # calculate the confusion matrix on a percent basis
        for curr_row in range(4):
            for curr_col in range(4):
                sim_results_percent[curr_row][curr_col] = \
                    float(sim_results[curr_row][curr_col]) / float(sim_results[curr_row][4])
            sim_results_percent[curr_row][4] = 1
        all_snr_results.append(sim_results)
        all_snr_results_percent.append(sim_results_percent)

        # write results to a file as a csv
        # NOTE(review): both count and percent matrices go to the same
        # file, as in the original - confirm that is intended
        write_csv(sim_results, output_file)
        write_csv(sim_results_percent, output_file)
        print('\n')

    write_csv_forplot(all_snr_results_percent, snr_db_ae_range, samples,
                      output_file_forplot)
    output_file.close()
    output_file_forplot.close()
def __init__(self, buf, len, debug, d_mac_id, d_seq_nr, no_self_loop):
    """Build an IEEE 802.15.4-style MAC frame from *buf* into ``d_msg``.

    Assembles header (FCF, sequence number, address fields), payload and
    CRC-16. With *no_self_loop* set, a non-standard MAC id byte is
    prepended for self-routing detection.

    NOTE(review): ``d_msg`` is never defined in this method - it is
    presumably a module-level list cleared elsewhere; verify before use.
    NOTE(review): ``++self.d_seq_nr`` is NOT an increment in Python
    (double unary plus, a no-op) - likely a C++ porting bug.
    NOTE(review): the Python-2 ``print`` statements below are syntax
    errors under Python 3.
    """
    self.debug = debug
    self.buf = buf
    self.len = len          # payload length in bytes (shadows builtin `len`)
    self.d_seq_nr = d_seq_nr
    self.d_mac_id = d_mac_id
    self.no_self_loop = no_self_loop
    if self.no_self_loop:
        #Insert an id here to check for self routing. This makes the packet non standard.
        d_msg.insert(0, self.d_mac_id)
        # total length: 11 header bytes + payload + 2 CRC bytes
        d_msg.insert(1, 11 + self.len + 2)
        #FCF
        d_msg.insert(2, 0x41)
        d_msg.insert(3, 0x88)
        #seq nr
        d_msg.insert(4, ++self.d_seq_nr)
        #addr info
        d_msg.insert(5, 0xcd)
        d_msg.insert(6, 0xab)
        d_msg.insert(7, 0xff)
        d_msg.insert(8, 0xff)
        d_msg.insert(9, 0x40)
        d_msg.insert(10, 0xe8)
        #Copy the data here. Handles PMT vectors, uniform u8 vectors,
        #or plain symbol strings.
        if (pmt.is_vector(buf)):
            for i in range(pmt.length(buf)):
                d_msg.insert(10 + i, pmt.to_long(pmt.vector_ref(buf, i)))
        elif (pmt.is_uniform_vector(buf)):
            d_msg.extend(pmt.u8vector_elements(buf))
        else:
            bufString = pmt.symbol_to_string(buf)
            #print "pmt.symbol_to_string(buf): ", bufString
            #print "pmt.length(buf): ", pmt.length(buf)
            bytes = map(ord, bufString)
            #print "map(ord,buf): ", bytes
            d_msg.extend(bytes)
        #Compute the CRC over the whole packet (excluding the CRC bytes themselves)
        crc = crc16(d_msg, self.len + 11)
        #if self.debug: print "#### CRC at Transmission: #### ", crc.get_crc()
        #CRC goes on the end (little-endian: low byte first).
        d_msg.insert(11 + self.len, crc.get_crc() & 0xFF)
        d_msg.insert(12 + self.len, crc.get_crc() >> 8)
        d_msg_len = 11 + self.len + 2
        print
        print
        if self.debug:
            print "d_msg: ", d_msg
        print
        print
    else:
        #Preamble length + CRC length ( CRC at the end)
        d_msg.insert(0, 10 + self.len + 2)
        #FCF
        d_msg.insert(1, 0x41)
        d_msg.insert(2, 0x88)
        #seq nr
        d_msg.insert(3, ++self.d_seq_nr)
        #addr info
        d_msg.insert(4, 0xcd)
        d_msg.insert(5, 0xab)
        d_msg.insert(6, 0xff)
        d_msg.insert(7, 0xff)
        d_msg.insert(8, 0x40)
        d_msg.insert(9, 0xe8)
        #Copy the data here. NOTE(review): unlike the branch above, this
        #path assumes *buf* is a uniform u8 vector - confirm callers.
        d_msg.extend(pmt.u8vector_elements(buf))
        #Compute the CRC over the whole packet (excluding the CRC bytes themselves)
        crc = crc16(d_msg, self.len + 10)
        if self.debug:
            print "#### CRC at Transmission: #### ", crc.get_crc()
        d_msg.insert(10 + self.len, crc.get_crc() & 0xFF)
        d_msg.insert(11 + self.len, crc.get_crc() >> 8)
        d_msg_len = 10 + self.len + 2   # Preamble + Data + CRC
        if self.debug:
            print " msg len ", d_msg_len, " len ", self.len, "\n"
def test_002_simpledfe(self):
    """ Use the simple DFE equalizer.

    Runs hard- and soft-output variants of the simple DFE OFDM equalizer
    over a slightly rotated channel, checks both decode to the original
    data, that their raw symbols differ, and that the channel-taps tag
    propagates with the final channel estimate.
    """
    fft_len = 8
    #  4   5  6  7   0  1  2   3
    tx_data = [
        -1, -1, 1, 2, -1, 3, 0, -1,  # 0
        -1, -1, 0, 2, -1, 2, 0, -1,  # 8
        -1, -1, 3, 0, -1, 1, 0, -1,  # 16 (Pilot symbols)
        -1, -1, 1, 1, -1, 0, 2, -1   # 24
    ]
    cnst = digital.constellation_qpsk()
    # -1 marks unused carriers; map everything else onto QPSK points
    tx_signal = [
        cnst.map_to_points_v(x)[0] if x != -1 else 0 for x in tx_data
    ]
    occupied_carriers = ((1, 2, 6, 7), )
    pilot_carriers = ((), (), (1, 2, 6, 7), ())
    pilot_symbols = ([], [], [cnst.map_to_points_v(x)[0] for x in (1, 0, 3, 0)], [])
    equalizer = digital.ofdm_equalizer_simpledfe(fft_len, cnst.base(),
                                                 occupied_carriers,
                                                 pilot_carriers,
                                                 pilot_symbols, 0, 0.01)
    equalizer_soft = digital.ofdm_equalizer_simpledfe(
        fft_len, cnst.base(), occupied_carriers, pilot_carriers,
        pilot_symbols, 0, 0.01, enable_soft_output=True)
    channel = [
        0, 0, 1, 1, 0, 1, 1, 0,  # These coefficients will be rotated slightly...
        0, 0, 1, 1, 0, 1, 1, 0,
        0, 0, 1j, 1j, 0, 1j, 1j, 0,  # Go crazy here!
        0, 0, 1j, 1j, 0, 1j, 1j, 0  # ...and again here.
    ]
    for idx in range(fft_len, 2 * fft_len):
        channel[idx] = channel[idx - fft_len] * \
            numpy.exp(1j * .1 * numpy.pi * (numpy.random.rand() - .5))
        idx2 = idx + 2 * fft_len
        # NOTE(review): exp(1j * 0 * ...) == 1, so this multiply is a
        # no-op - possibly an intentionally disabled rotation.
        channel[idx2] = channel[idx2] * \
            numpy.exp(1j * 0 * numpy.pi * (numpy.random.rand() - .5))
    # Seed the equalizer with the first OFDM symbol's channel taps
    chan_tag = gr.tag_t()
    chan_tag.offset = 0
    chan_tag.key = pmt.string_to_symbol("ofdm_sync_chan_taps")
    chan_tag.value = pmt.init_c32vector(fft_len, channel[:fft_len])
    src = blocks.vector_source_c(numpy.multiply(tx_signal, channel), False,
                                 fft_len, (chan_tag, ))
    eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), 0,
                                           self.tsb_key, True)
    eq_soft = digital.ofdm_frame_equalizer_vcvc(equalizer_soft.base(), 0,
                                                self.tsb_key, True)
    sink = blocks.tsb_vector_sink_c(fft_len, tsb_key=self.tsb_key)
    sink_soft = blocks.tsb_vector_sink_c(fft_len, tsb_key=self.tsb_key)
    stream_to_tagged = blocks.stream_to_tagged_stream(
        gr.sizeof_gr_complex, fft_len,
        len(tx_data) // fft_len, self.tsb_key)
    # Both equalizers fed from the same tagged stream
    self.tb.connect(src, stream_to_tagged, eq, sink)
    self.tb.connect(stream_to_tagged, eq_soft, sink_soft)
    self.tb.run()
    out_syms = numpy.array(sink.data()[0])
    out_syms_soft = numpy.array(sink_soft.data()[0])

    def demod(syms):
        # Map equalized symbols back to constellation indices (-1 = unused)
        return [
            cnst.decision_maker_v((x, )) if x != 0 else -1 for x in syms
        ]

    rx_data = demod(out_syms)
    rx_data_soft = demod(out_syms_soft)
    # Uncomment to plot symbols
    #import matplotlib.pyplot as plt
    #def plot_syms(d): plt.figure(); plt.plot(d.real, d.imag, 'b.')
    #
    # plot_syms(out_syms)
    # plot_syms(out_syms_soft)
    # plt.show()
    self.assertEqual(tx_data, rx_data)
    self.assertEqual(rx_data, rx_data_soft)
    # Hard and soft outputs decode identically but are not the same symbols
    self.assertFalse(numpy.allclose(out_syms, out_syms_soft))
    self.assertEqual(len(sink.tags()), 1)
    tag = sink.tags()[0]
    self.assertEqual(pmt.symbol_to_string(tag.key), "ofdm_sync_chan_taps")
    self.assertComplexTuplesAlmostEqual(list(
        pmt.c32vector_elements(tag.value)), channel[-fft_len:], places=1)
def test_001(self):
    """Check the correlator's ``time_est`` tag against a QA estimate.

    A simple preamble is pulse-shaped through a polyphase resampler and
    fed to correlate_and_sync_cc; the timing error reported via the
    ``time_est`` tag must match a centroid-based estimate of the
    preamble position, modulo the samples-per-symbol.
    """
    # We're using a really simple preamble so that the correlation
    # is straight forward.
    preamble = [0, 0, 0, 1, 0, 0, 0]
    # Our pulse shape has this width (in units of symbols).
    pulse_width = 1.5
    # The number of filters to use for resampling.
    n_filters = 12
    sps = 3
    data = [0] * 10 + preamble + [0] * 40
    src = blocks.vector_source_c(data)
    # We want to generate taps with a sampling rate of sps=n_filters for resampling
    # purposes.
    pulse_shape = make_parabolic_pulse_shape(sps=n_filters, N=0.5, scale=35)
    # Create our resampling filter to generate the data for the correlator.
    shape = filter.pfb_arb_resampler_ccf(sps, pulse_shape, n_filters)
    # Generate the correlator block itself.
    correlator = digital.correlate_and_sync_cc(preamble, pulse_shape, sps,
                                               n_filters)
    # Connect it all up and go.
    snk = blocks.vector_sink_c()
    null = blocks.null_sink(gr.sizeof_gr_complex)
    tb = gr.top_block()
    tb.connect(src, shape, correlator, snk)
    tb.connect((correlator, 1), null)
    tb.run()
    # Look at the tags. Retrieve the timing offset.
    data = snk.data()
    offset = None
    timing_error = None
    for tag in snk.tags():
        key = pmt.symbol_to_string(tag.key)
        if key == "time_est":
            offset = tag.offset
            timing_error = pmt.to_double(tag.value)
    if offset is None:
        raise ValueError("No tags found.")
    # Detect where the middle of the preamble is.
    # Assume we have only one peak and that it is symmetric.
    # (Centroid of |data| gives the peak position.)
    sum_id = 0
    sum_d = 0
    for i, d in enumerate(data):
        sum_id += i * abs(d)
        sum_d += abs(d)
    data_i = sum_id / sum_d
    # NOTE(review): this guard is redundant - the raise above already
    # guarantees offset is not None here.
    if offset is not None:
        diff = data_i - offset
        # Fold the difference into (-sps/2, sps/2]
        remainder = -(diff % sps)
        if remainder < -sps / 2.0:
            remainder += sps
        tol = 0.2
        difference = timing_error - remainder
        difference = difference % sps
        if abs(difference) >= tol:
            print(
                "Tag gives timing estimate of {0}. QA calculates it as {1}. Tolerance is {2}"
                .format(timing_error, remainder, tol))
        self.assertTrue(abs(difference) < tol)
def get_string_from_tag(tag):
    """Render a stream tag with a long-valued payload as a readable string."""
    srcid = pmt.symbol_to_string(tag.srcid)
    key = pmt.symbol_to_string(tag.key)
    value = pmt.to_long(tag.value)
    return ('srcid=' + srcid + ', key=' + key + ', value=' + str(value) +
            ', offset=' + str(tag.offset))
def test_002_cfo_compensation(self):
    """Check extract_burst_cc compensates a known CFO via the sc_rot tag.

    Builds frames with a deliberate frequency offset, attaches a
    ``sc_rot`` rotation hint to each burst tag, and verifies the
    extracted bursts are de-rotated back to the reference frames.

    Fix: ``np.complex`` was deprecated in NumPy 1.20 and removed in
    1.24; use the concrete ``np.complex128`` dtype instead.
    """
    samp_rate = 30.72e6
    freq_offset = 1000.0 * 10
    n_frames = 2
    burst_len = 383
    gap_len = 53
    tag_key = "energy_start"
    data = np.arange(burst_len)
    ref = np.array([], dtype=complex)
    tags = []
    for i in range(n_frames):
        frame = np.ones(burst_len) * (i + 1)
        # np.complex128 instead of the removed np.complex alias
        frame = frame.astype(np.complex128)
        ref = np.concatenate((ref, frame))
        phase = convert_frequency_to_phase_increment(
            freq_offset, samp_rate)
        # Apply the frequency offset as a per-sample phase ramp
        sine = np.exp(1.0j * phase * np.arange(frame.size))
        frame *= sine
        tag = gr.tag_t()
        tag.key = pmt.string_to_symbol(tag_key)
        tag.offset = burst_len + i * (burst_len + gap_len)
        tag.srcid = pmt.string_to_symbol("qa")
        # sc_rot carries the per-sample rotation the block should undo
        tagvalue = {"sc_rot": complex(np.cos(phase), np.sin(phase))}
        tag.value = pmt.to_pmt(tagvalue)
        tags.append(tag)
        data = np.concatenate((data, frame, np.zeros(gap_len)))
    # print(np.reshape(data, (-1, burst_len)))
    # print('data len', len(data), 'ref len', len(ref))

    src = blocks.vector_source_c(data, False, 1, tags)
    burster = gfdm.extract_burst_cc(burst_len, 0, tag_key, True)
    snk = blocks.vector_sink_c()
    self.tb.connect(src, burster, snk)
    self.tb.run()

    res = np.array(snk.data())
    rx_tags = snk.tags()
    self.assertEqual(len(rx_tags), n_frames)
    for i, t in enumerate(rx_tags):
        self.assertEqual(pmt.symbol_to_string(t.key), tag_key)
        self.assertTrue(pmt.is_true(t.value))
        self.assertEqual(pmt.symbol_to_string(t.srcid), burster.name())
        self.assertEqual(t.offset, i * burst_len)

    # check data: after compensation, residual phase must be ~0 and
    # amplitudes must match the reference
    print(ref[0:10])
    print(res[0:10])
    reference_phase = convert_frequency_to_phase_increment(
        freq_offset, samp_rate)
    for i, left, right in zip(range(ref.size), ref, res):
        phase = np.angle(right)
        leftampl = np.abs(left)
        rightampl = np.abs(right)
        absdiff = np.abs(right - left)
        print(
            f"{i} {left:.7} == {right:.7}\t |{leftampl}|\t|{rightampl}|\tampl=={np.abs(leftampl - rightampl) < 1.0e-7}\t{absdiff=} != {absdiff < 1.0e-4}\t{phase:.7}"
        )
        self.assertAlmostEqual(leftampl, rightampl, 4)
        self.assertLess(phase, 1.0e-6)
    print(f"{reference_phase=}")
    self.assertComplexTuplesAlmostEqual(ref, res, 4)
def handle_msg(self, msg):
    """Forward the string payload of a PMT symbol message to the serial port."""
    payload = pmt.symbol_to_string(msg)
    self.ser.write(payload)
def print_tag(self, tag):
    """Print a human-readable summary of a stream tag (long-valued).

    Fix: the original Python-2 ``print`` statement is a syntax error
    under Python 3 (which this file otherwise targets); converted to
    the ``print()`` function. Output text is unchanged.
    """
    my_string = "key = " + pmt.symbol_to_string(
        tag.key) + "\tsrcid = " + pmt.symbol_to_string(tag.srcid)
    my_string = my_string + "\tvalue = " + str(pmt.to_long(tag.value))
    my_string = my_string + "\toffset = " + str(tag.offset)
    print(my_string)
def test_003_t(self):
    """ more advanced:
    - 6 symbols per carrier
    - 2 pilots per carrier
    - have enough data for nearly 3 OFDM symbols
    - send that twice
    - add some random tags
    - don't shift
    """
    tx_symbols = list(range(1, 16))  # 15 symbols
    pilot_symbols = ((1j, 2j), (3j, 4j))
    occupied_carriers = (
        (1, 3, 4, 11, 12, 14),
        (1, 2, 4, 11, 13, 14),
    )
    pilot_carriers = ((2, 13), (3, 12))
    # Expected allocator output: data on occupied carriers, pilots
    # interleaved, zeros elsewhere (3 OFDM symbols per packet, twice)
    expected_result = list(
        (0, 1, 1j, 2, 3, 0, 0, 0, 0, 0, 0, 4, 5, 2j, 6, 0, 0, 7, 8, 3j, 9,
         0, 0, 0, 0, 0, 0, 10, 4j, 11, 12, 0, 0, 13, 1j, 14, 15, 0, 0, 0,
         0, 0, 0, 0, 0, 2j, 0, 0))
    fft_len = 16
    # Tags placed at various item offsets to check tag propagation
    testtag1 = gr.tag_t()
    testtag1.offset = 0
    testtag1.key = pmt.string_to_symbol('tag1')
    testtag1.value = pmt.from_long(0)
    testtag2 = gr.tag_t()
    testtag2.offset = 7  # On the 2nd OFDM symbol
    testtag2.key = pmt.string_to_symbol('tag2')
    testtag2.value = pmt.from_long(0)
    testtag3 = gr.tag_t()
    testtag3.offset = len(tx_symbols) + 1  # First OFDM symbol of packet 2
    testtag3.key = pmt.string_to_symbol('tag3')
    testtag3.value = pmt.from_long(0)
    testtag4 = gr.tag_t()
    # Last OFDM symbol of packet 2
    testtag4.offset = 2 * len(tx_symbols) - 1
    testtag4.key = pmt.string_to_symbol('tag4')
    testtag4.value = pmt.from_long(0)
    src = blocks.vector_source_c(tx_symbols * 2, False, 1,
                                 (testtag1, testtag2, testtag3, testtag4))
    alloc = digital.ofdm_carrier_allocator_cvc(fft_len, occupied_carriers,
                                               pilot_carriers,
                                               pilot_symbols, (),
                                               self.tsb_key, False)
    sink = blocks.tsb_vector_sink_c(fft_len)
    self.tb.connect(
        src,
        blocks.stream_to_tagged_stream(gr.sizeof_gr_complex, 1,
                                       len(tx_symbols), self.tsb_key),
        alloc, sink)
    self.tb.run()
    self.assertEqual(sink.data()[0], expected_result)
    # Every input tag must come out once, re-mapped to OFDM-symbol offsets
    tags_found = {
        'tag1': False,
        'tag2': False,
        'tag3': False,
        'tag4': False
    }
    correct_offsets = {'tag1': 0, 'tag2': 1, 'tag3': 3, 'tag4': 5}
    for tag in sink.tags():
        key = pmt.symbol_to_string(tag.key)
        if key in list(tags_found.keys()):
            tags_found[key] = True
            self.assertEqual(correct_offsets[key], tag.offset)
    self.assertTrue(all(tags_found.values()))
def work(self, input_items, output_items):
    """Record the key of each tag seen in this window in ``self.key``.

    Consumes all input; the last tag's key (if any) wins.
    """
    n_items = len(input_items[0])
    for tag in self.get_tags_in_window(0, 0, n_items):
        self.key = pmt.symbol_to_string(tag.key)
    return n_items
def handle_msg(self, msg):
    """Store the message payload in the module-level ``textboxValue``."""
    global textboxValue
    textboxValue = pmt.symbol_to_string(msg)
def work(self, input_items, output_items):
    """Burst-gated BER counter.

    Periodically refreshes state from the channel backplane, toggles
    processing on "Begin Burst"/"End Burst" tags, correlates the known
    preamble against the input, and counts bit errors against the
    remaining preamble and repeated payload bits.
    """
    num_input_items = len(input_items[0])
    out = output_items[0]

    # check to see if cbp has been updated (only every 100th call)
    if self.work_counter % 100 == 0:
        frame_index = 0 + self.cbp.channel_header_size_bytes
        frame_header = self.cbp.read_frame_header(frame_index)
        if frame_header.frame_id != self.frame_id:
            print(' New Frame! Updating data from channel backplane')
            self.read_cbp()
            self.work_counter = 0
    self.work_counter += 1

    # if tag exists: demod->decode->map->compare
    tags = self.get_tags_in_window(0, 0, num_input_items)
    # look for tags to indicate burst
    for tag in tags:
        tag_key = pmt.symbol_to_string(tag.key)
        if (tag_key == "Begin Burst"):
            self.process = True
            print('Attempting Demod on Burst')
        elif (tag_key == "End Burst"):
            self.process = False
            print('Stopping Demod, Burst Ended')
            print ('Total bits received / expected:', self.bit_counter, self.total_bits_transmitted)
            self.bit_counter = 0

    # no burst detected, dont process samples
    if self.process == False:
        output_items[0][:] = np.zeros(num_input_items)
        return len(output_items[0])

    self.bit_counter += num_input_items
    if num_input_items > self.bits_from_preamable_to_use:
        #print ('\n\n Preamble: ', list(self.preamble_bits[0:self.bits_from_preamable_to_use]), '\n\n Seq: \n\n', list(input_items[0]) )
        # locate the known preamble slice in the input
        correlations = self.find_subsequence(input_items[0], self.preamble_bits[self.preamble_start_index:self.preamble_stop_index])
        if len(correlations) > 0:
            print ("============Correlated Preamble============ ", correlations)
            first_corr_input_index = correlations[0]
            last_corr_input_index = first_corr_input_index + self.bits_from_preamable_to_use
            bits_left_to_check = num_input_items - last_corr_input_index
            # can only check preamble remainder bits
            bits_left_unchecked_in_preamble = len(self.preamble_bits) - self.preamble_stop_index
            input_index = last_corr_input_index + 1
            preamble_index = self.preamble_stop_index + 1
            error_count = 0
            bits_left_unchecked_in_payload = len(self.payload_bits)
            payload_index = 0
            #print (first_corr_input_index, last_corr_input_index, bits_left_to_check, len(self.preamble_bits), stop, bits_left_unchecked_in_preamble, bits_left_unchecked_in_payload)
            # ber on all bits: alternate preamble-remainder and payload
            # comparisons until the window is exhausted
            while bits_left_to_check > 1:
                # check remainder of preamble
                print('BER on Preamble Number of Bits ' + str(bits_left_to_check))
                while bits_left_unchecked_in_preamble > 1 and bits_left_to_check > 1:
                    #print('\n input index: ', input_index, '\n', 'preamble_index: ', preamble_index, '\n', 'bits left to check: ', bits_left_to_check, '\n', 'bits left in preamble: ', bits_left_unchecked_in_preamble, '\n')
                    if input_items[0][input_index] != self.preamble_bits[preamble_index]:
                        error_count+=1
                    bits_left_to_check -=1
                    input_index +=1
                    preamble_index +=1
                    bits_left_unchecked_in_preamble -=1
                preamble_index = 0
                bits_left_unchecked_in_preamble = len(self.preamble_bits)
                # check remainder of payload
                print('BER on Payload Number of Bits ' + str(bits_left_to_check))
                while bits_left_unchecked_in_payload > 1 and bits_left_to_check > 1:
                    if input_items[0][input_index] != self.payload_bits[payload_index]:
                        error_count+=1
                    bits_left_to_check -=1
                    input_index +=1
                    payload_index +=1
                    bits_left_unchecked_in_payload -=1
                payload_index = 0
                bits_left_unchecked_in_payload = len(self.payload_bits)
            print('\n\n\n\n Error: ' + str(error_count) + ' out of ' + str(num_input_items - first_corr_input_index) )
            print(' Error Rate: ' + str (float(error_count)/float(num_input_items-first_corr_input_index)) + '\n\n')
    return len(output_items[0])
def test_38(self):
    """ Test case generated by test-case generator

    Full LoRa SDR Tx->Rx loopback (sf=8, cr=6, explicit header, CRC on,
    4x resampled Rx) driven by a random message strobe; asserts the
    decoded payload equals the source data.
    NOTE(review): uses a fixed time.sleep(10), so this test takes at
    least 10 seconds of wall time.
    """
    ##################################################
    # Variables
    ##################################################
    # Input data into the system
    src_data = "PKdhtXMmr18n2L9K88eMlGn7CcctT9RwKSB1FebW397VI5uG1yhc3uavuaOb9vyJ"
    self.bw = bw = 250000
    self.sf = sf = 8
    self.samp_rate = samp_rate = 250000
    self.pay_len = pay_len = 64
    self.n_frame = n_frame = 2
    self.impl_head = impl_head = False
    self.has_crc = has_crc = True
    self.frame_period = frame_period = 200
    self.cr = cr = 6

    ##################################################
    # Blocks
    ##################################################
    # Tx side
    self.lora_sdr_whitening_0 = lora_sdr.whitening()
    self.lora_sdr_modulate_0 = lora_sdr.modulate(sf, samp_rate, bw)
    self.lora_sdr_modulate_0.set_min_output_buffer(10000000)
    self.lora_sdr_interleaver_0 = lora_sdr.interleaver(cr, sf)
    self.lora_sdr_header_0 = lora_sdr.header(impl_head, has_crc, cr)
    self.lora_sdr_hamming_enc_0 = lora_sdr.hamming_enc(cr, sf)
    self.lora_sdr_gray_decode_0 = lora_sdr.gray_decode(sf)
    self.lora_sdr_data_source_0_1_0 = lora_sdr.data_source(
        pay_len, n_frame, src_data)
    self.lora_sdr_add_crc_0 = lora_sdr.add_crc(has_crc)
    self.blocks_null_sink_0 = blocks.null_sink(gr.sizeof_gr_complex * 1)
    self.blocks_message_strobe_random_0_1_0 = blocks.message_strobe_random(
        pmt.intern(''), blocks.STROBE_UNIFORM, frame_period, 5)

    # Rx side
    self.rational_resampler_xxx_0 = filter.rational_resampler_ccc(
        interpolation=4, decimation=1, taps=None, fractional_bw=None)
    self.lora_sdr_header_decoder_0 = lora_sdr.header_decoder(
        impl_head, cr, pay_len, has_crc)
    self.lora_sdr_hamming_dec_0 = lora_sdr.hamming_dec()
    self.lora_sdr_gray_enc_0 = lora_sdr.gray_enc()
    self.lora_sdr_frame_sync_0 = lora_sdr.frame_sync(
        samp_rate, bw, sf, impl_head)
    self.lora_sdr_fft_demod_0 = lora_sdr.fft_demod(samp_rate, bw, sf,
                                                   impl_head)
    self.lora_sdr_dewhitening_0 = lora_sdr.dewhitening()
    self.lora_sdr_deinterleaver_0 = lora_sdr.deinterleaver(sf)
    self.lora_sdr_crc_verif_0 = lora_sdr.crc_verif()
    self.blocks_message_debug_0 = blocks.message_debug()
    self.blocks_throttle_0 = blocks.throttle(gr.sizeof_gr_complex * 1,
                                             samp_rate, True)

    ##################################################
    # Connections
    ##################################################
    # Tx side: strobe triggers data source; headers/CRC/coding chain
    self.tb.msg_connect(
        (self.blocks_message_strobe_random_0_1_0, 'strobe'),
        (self.lora_sdr_data_source_0_1_0, 'trigg'))
    self.tb.msg_connect((self.lora_sdr_data_source_0_1_0, 'msg'),
                        (self.lora_sdr_add_crc_0, 'msg'))
    self.tb.msg_connect((self.lora_sdr_data_source_0_1_0, 'msg'),
                        (self.lora_sdr_header_0, 'msg'))
    self.tb.msg_connect((self.lora_sdr_data_source_0_1_0, 'msg'),
                        (self.lora_sdr_interleaver_0, 'msg'))
    self.tb.msg_connect((self.lora_sdr_data_source_0_1_0, 'msg'),
                        (self.lora_sdr_modulate_0, 'msg'))
    self.tb.msg_connect((self.lora_sdr_data_source_0_1_0, 'msg'),
                        (self.lora_sdr_whitening_0, 'msg'))
    self.tb.connect((self.lora_sdr_add_crc_0, 0),
                    (self.lora_sdr_hamming_enc_0, 0))
    self.tb.connect((self.lora_sdr_gray_decode_0, 0),
                    (self.lora_sdr_modulate_0, 0))
    self.tb.connect((self.lora_sdr_hamming_enc_0, 0),
                    (self.lora_sdr_interleaver_0, 0))
    self.tb.connect((self.lora_sdr_header_0, 0),
                    (self.lora_sdr_add_crc_0, 0))
    self.tb.connect((self.lora_sdr_interleaver_0, 0),
                    (self.lora_sdr_gray_decode_0, 0))
    self.tb.connect((self.lora_sdr_whitening_0, 0),
                    (self.lora_sdr_header_0, 0))
    self.tb.connect((self.lora_sdr_modulate_0, 0),
                    (self.blocks_throttle_0, 0))

    # Rx side: resample, sync, demod, decode chain with header feedback
    self.tb.connect((self.blocks_throttle_0, 0),
                    (self.rational_resampler_xxx_0, 0))
    self.tb.msg_connect((self.lora_sdr_crc_verif_0, 'msg'),
                        (self.blocks_message_debug_0, 'store'))
    self.tb.msg_connect((self.lora_sdr_frame_sync_0, 'new_frame'),
                        (self.lora_sdr_deinterleaver_0, 'new_frame'))
    self.tb.msg_connect((self.lora_sdr_frame_sync_0, 'new_frame'),
                        (self.lora_sdr_dewhitening_0, 'new_frame'))
    self.tb.msg_connect((self.lora_sdr_frame_sync_0, 'new_frame'),
                        (self.lora_sdr_fft_demod_0, 'new_frame'))
    self.tb.msg_connect((self.lora_sdr_frame_sync_0, 'new_frame'),
                        (self.lora_sdr_hamming_dec_0, 'new_frame'))
    self.tb.msg_connect((self.lora_sdr_frame_sync_0, 'new_frame'),
                        (self.lora_sdr_header_decoder_0, 'new_frame'))
    self.tb.msg_connect((self.lora_sdr_header_decoder_0, 'pay_len'),
                        (self.lora_sdr_crc_verif_0, 'pay_len'))
    self.tb.msg_connect((self.lora_sdr_header_decoder_0, 'CRC'),
                        (self.lora_sdr_crc_verif_0, 'CRC'))
    self.tb.msg_connect((self.lora_sdr_header_decoder_0, 'CR'),
                        (self.lora_sdr_deinterleaver_0, 'CR'))
    self.tb.msg_connect((self.lora_sdr_header_decoder_0, 'pay_len'),
                        (self.lora_sdr_dewhitening_0, 'pay_len'))
    self.tb.msg_connect((self.lora_sdr_header_decoder_0, 'CRC'),
                        (self.lora_sdr_dewhitening_0, 'CRC'))
    self.tb.msg_connect((self.lora_sdr_header_decoder_0, 'CR'),
                        (self.lora_sdr_fft_demod_0, 'CR'))
    self.tb.msg_connect((self.lora_sdr_header_decoder_0, 'CR'),
                        (self.lora_sdr_frame_sync_0, 'CR'))
    self.tb.msg_connect((self.lora_sdr_header_decoder_0, 'err'),
                        (self.lora_sdr_frame_sync_0, 'err'))
    self.tb.msg_connect((self.lora_sdr_header_decoder_0, 'CRC'),
                        (self.lora_sdr_frame_sync_0, 'crc'))
    self.tb.msg_connect((self.lora_sdr_header_decoder_0, 'pay_len'),
                        (self.lora_sdr_frame_sync_0, 'pay_len'))
    self.tb.msg_connect((self.lora_sdr_header_decoder_0, 'CR'),
                        (self.lora_sdr_hamming_dec_0, 'CR'))
    self.tb.connect((self.lora_sdr_deinterleaver_0, 0),
                    (self.lora_sdr_hamming_dec_0, 0))
    self.tb.connect((self.lora_sdr_dewhitening_0, 0),
                    (self.lora_sdr_crc_verif_0, 0))
    self.tb.connect((self.lora_sdr_fft_demod_0, 0),
                    (self.lora_sdr_gray_enc_0, 0))
    self.tb.connect((self.lora_sdr_frame_sync_0, 0),
                    (self.lora_sdr_fft_demod_0, 0))
    self.tb.connect((self.lora_sdr_gray_enc_0, 0),
                    (self.lora_sdr_deinterleaver_0, 0))
    self.tb.connect((self.lora_sdr_hamming_dec_0, 0),
                    (self.lora_sdr_header_decoder_0, 0))
    self.tb.connect((self.lora_sdr_header_decoder_0, 0),
                    (self.lora_sdr_dewhitening_0, 0))
    self.tb.connect((self.rational_resampler_xxx_0, 0),
                    (self.lora_sdr_frame_sync_0, 0))

    # run the flowgraph, since we use a message strobe we have to run and
    # stop the flowgraph with some computation time inbetween
    self.tb.start()
    time.sleep(10)
    self.tb.stop()
    self.tb.wait()

    # try to get get the message from the store port of the message debug
    # printer and convert to string from pmt message
    try:
        msg = pmt.symbol_to_string(
            self.blocks_message_debug_0.get_message(0))
    except:
        # if not possible set message to be None
        msg = None

    # check if message received is the same as the message decoded
    self.assertMultiLineEqual(
        src_data,
        msg,
        msg="Error decoded data {0} is not the same as input data {1}".
        format(msg, src_data))
def _assert_tags(self, tags: [ExpectedTag]):
    """Assert the sink's tags match *tags* (offset, key, value) in order."""
    received = self.dst.tags()
    self.assertEqual(len(received), len(tags))
    for actual, expected in zip(received, tags):
        self.assertEqual(actual.offset, expected.offset)
        self.assertEqual(pmt.symbol_to_string(actual.key), expected.key)
        self.assertEqual(pmt.to_python(actual.value), expected.value)
def test_19(self):
    """ Test case generated by test-case generator

    LoRa SDR hier Tx->Rx loopback (sf=8, cr=3, implicit header, no CRC,
    4x interpolating FIR between throttle and Rx); asserts the decoded
    payload equals the source data.
    """
    ##################################################
    # Variables
    ##################################################
    # Input data into the system
    src_data = "PKdhtXMmr18n2L9K88eMlGn7CcctT9RwKSB1FebW397VI5uG1yhc3uavuaOb9vyJ"
    self.bw = bw = 250000
    self.sf = sf = 8
    self.samp_rate = samp_rate = bw
    self.pay_len = pay_len = 64
    self.n_frame = n_frame = 1
    self.impl_head = impl_head = True
    self.has_crc = has_crc = False
    self.frame_period = frame_period = 200
    self.cr = cr = 3

    ##################################################
    # Blocks
    ##################################################
    self.lora_sdr_hier_tx_0 = lora_sdr.hier_tx(pay_len, n_frame,src_data , cr, sf, impl_head, has_crc, samp_rate, bw, 200, [8, 16] , True)
    self.lora_sdr_hier_rx_0_1_0_0_1_0 = lora_sdr.hier_rx(samp_rate, bw, sf, impl_head, cr, pay_len, has_crc, [8, 16] , True)
    # 4x interpolating low-pass FIR between Tx and Rx
    self.interp_fir_filter_xxx_0_0 = filter.interp_fir_filter_ccf(4, (-0.128616616593872, -0.212206590789194, -0.180063263231421, 3.89817183251938e-17 ,0.300105438719035 ,0.636619772367581 ,0.900316316157106, 1 ,0.900316316157106, 0.636619772367581, 0.300105438719035, 3.89817183251938e-17, -0.180063263231421, -0.212206590789194, -0.128616616593872))
    self.interp_fir_filter_xxx_0_0.declare_sample_delay(0)
    self.interp_fir_filter_xxx_0_0.set_min_output_buffer(20000)
    self.blocks_throttle_0_0 = blocks.throttle(gr.sizeof_gr_complex*1, samp_rate*10,True)
    #get the output
    self.blocks_message_debug_0 = blocks.message_debug()

    ##################################################
    # Connections
    ##################################################
    self.tb.connect((self.blocks_throttle_0_0, 0), (self.interp_fir_filter_xxx_0_0, 0))
    self.tb.connect((self.interp_fir_filter_xxx_0_0, 0), (self.lora_sdr_hier_rx_0_1_0_0_1_0, 0))
    self.tb.connect((self.lora_sdr_hier_tx_0, 0), (self.blocks_throttle_0_0, 0))
    #output msg connection
    self.tb.msg_connect((self.lora_sdr_hier_rx_0_1_0_0_1_0, 'msg'),
                        (self.blocks_message_debug_0, 'store'))

    # NOTE(review): the following nested defs appear to be leftover
    # GRC-generated accessors pasted into this test. They are never
    # called here, and several reference self._lock / attributes that
    # this test object may not have - candidates for removal.
    def get_bw(self):
        return self.bw

    def set_bw(self, bw):
        with self._lock:
            self.bw = bw
            self.set_samp_rate(self.bw)

    def get_sf(self):
        return self.sf

    def set_sf(self, sf):
        with self._lock:
            self.sf = sf

    def get_samp_rate(self):
        return self.samp_rate

    def set_samp_rate(self, samp_rate):
        with self._lock:
            self.samp_rate = samp_rate
            self.blocks_throttle_0_0.set_sample_rate(self.samp_rate)

    def get_pay_len(self):
        return self.pay_len

    def set_pay_len(self, pay_len):
        with self._lock:
            self.pay_len = pay_len

    def get_n_frame(self):
        return self.n_frame

    def set_n_frame(self, n_frame):
        with self._lock:
            self.n_frame = n_frame

    def get_multi_control(self):
        return self.multi_control

    def set_multi_control(self, multi_control):
        with self._lock:
            self.multi_control = multi_control

    def get_mult_const(self):
        return self.mult_const

    def set_mult_const(self, mult_const):
        with self._lock:
            self.mult_const = mult_const

    def get_mean(self):
        return self.mean

    def set_mean(self, mean):
        with self._lock:
            self.mean = mean

    def get_impl_head(self):
        return self.impl_head

    def set_impl_head(self, impl_head):
        with self._lock:
            self.impl_head = impl_head

    def get_has_crc(self):
        return self.has_crc

    def set_has_crc(self, has_crc):
        with self._lock:
            self.has_crc = has_crc

    def get_frame_period(self):
        return self.frame_period

    def set_frame_period(self, frame_period):
        with self._lock:
            self.frame_period = frame_period

    def get_cr(self):
        return self.cr

    def set_cr(self, cr):
        with self._lock:
            self.cr = cr

    # run the flowgraph, since we use a message strobe we have to run and
    # stop the flowgraph with some computation time inbetween
    self.tb.start()
    # time.sleep(10)
    # self.tb.stop()
    self.tb.wait()

    num_messages = self.blocks_message_debug_0.num_messages()
    if num_messages > 1:
        # try to get get the message from the store port of the message
        # debug printer and convert to string from pmt message
        try:
            msg = pmt.symbol_to_string(
                self.blocks_message_debug_0.get_message(1))
        except:
            # if not possible set message to be None
            msg = None
    else:
        # try to get get the message from the store port of the message
        # debug printer and convert to string from pmt message
        try:
            msg = pmt.symbol_to_string(
                self.blocks_message_debug_0.get_message(0))
        except:
            # if not possible set message to be None
            msg = None

    # check if message received is the same as the message decoded
    self.assertMultiLineEqual(
        src_data,
        msg,
        msg="Error decoded data {0} is not the same as input data {1}".format(msg, src_data))
def handle_msg(self, msg):
    """Decode a hex-string PMT message into bytes and flag them for sending."""
    hex_string = pmt.symbol_to_string(msg)
    self.items = bytearray.fromhex(hex_string)
    self.send = 1
def __init__(self, fname='', add_metadata=False, metadata_format='',
             data_type='uint8', precision=0):
    """CSV writer sink: receives PDUs on the 'in' message port and
    writes their payload (and optional metadata header) to a CSV file.

    Args:
        fname: output CSV file name
        add_metadata: include metadata fields in the output
        metadata_format: header template; empty means learn fields from
            the first message received
        data_type: key into data_type_mappings selecting the PMT vector parser
        precision: numeric output precision (stored, used elsewhere)
    """
    gr.sync_block.__init__(self,
                           name="csv_writer",
                           in_sig=None,
                           out_sig=None)
    self.fname = fname
    self.add_metadata = add_metadata
    self.metadata_format = metadata_format
    self.data_type = data_type
    self.precision = precision
    self.fid = None     # output file handle, opened lazily elsewhere

    # setup logger: reuse the block-aliased logger if it already exists
    logger_name = 'gr_log.' + self.to_basic_block().alias()
    if logger_name in gr.logger_get_names():
        self.log = gr.logger(logger_name)
    else:
        self.log = gr.logger('log')

    # metadata field mappings: metadata type name -> PMT extractor
    self.metadata_mappings = {
        'string': lambda x: pmt.symbol_to_string(x),
        'bool': lambda x: pmt.to_bool(x),
        'long': lambda x: pmt.to_long(x),
        'uint64': lambda x: pmt.to_uint64(x),
        'float': lambda x: pmt.to_float(x),
        'double': lambda x: pmt.to_double(x),
        'complex': lambda x: pmt.to_complex(x),
        # (secs, frac) pair stored as a PMT pair
        'time': lambda x: float(pmt.to_uint64(pmt.car(x))) + pmt.to_double(
            pmt.cdr(x)),
        # (secs, frac) pair stored as a PMT tuple
        'time_tuple': lambda x: float(pmt.to_uint64(pmt.tuple_ref(x, 0))) + pmt.
        to_double(pmt.tuple_ref(x, 1))
    }

    # data type parsers: sample type name -> PMT uniform-vector extractor
    self.data_type_mappings = {
        'uint8': lambda x: pmt.u8vector_elements(x),
        'int8': lambda x: pmt.s8vector_elements(x),
        'uint16': lambda x: pmt.u16vector_elements(x),
        'int16': lambda x: pmt.s16vector_elements(x),
        'uint32': lambda x: pmt.u32vector_elements(x),
        'int32': lambda x: pmt.s32vector_elements(x),
        'float': lambda x: pmt.f32vector_elements(x),
        'complex float': lambda x: pmt.c32vector_elements(x),
        'double': lambda x: pmt.f64vector_elements(x),
        'complex double': lambda x: pmt.c64vector_elements(x)
    }

    # check data type
    if data_type not in self.data_type_mappings.keys():
        raise ValueError('Invalid data type')

    self.find_metadata = False
    self.header = []
    if self.add_metadata:
        if self.metadata_format == '':
            # set flag to load metadata on first message received
            self.find_metadata = True
        else:
            self.parse_header_format()

    # register message handler
    self.message_port_name = pmt.intern('in')
    self.message_port_register_in(self.message_port_name)
    self.set_msg_handler(self.message_port_name, self.message_handler)
def general_work(self, input_items, output_items):
    """Coarse frequency synchronization for one OFDM frame.

    Consumes 2048 input samples (two 1024-point symbols), estimates the
    coarse (integer-bin) frequency offset by correlating the pilot
    carriers of the two symbols over a search window of
    +/- self.freq_range bins, shifts both symbols by the estimate and
    writes them to the output.  If no frame is processed, all input is
    consumed and nothing is produced.

    NOTE(review): log messages are intentionally left in German; they
    are runtime strings.
    """
    self.log.debug('\nFreq Sync')
    # Default: consume everything, create nothing (overridden below
    # when a frame is actually processed).
    self.items_consumed = len(input_items[0])
    self.items_created = 0
    tags = self.get_tags_in_range(
        0, self.nitems_read(0), self.nitems_read(0) + len(input_items[0]))
    for tag in tags:
        if pmt.symbol_to_string(tag.key) == 'MODE':
            mode = pmt.symbol_to_string(tag.value)
            # Only mode 'B' is implemented (mapped to self.mode == 2).
            if mode == 'B':
                self.mode = 2
            else:
                self.log.debug('\tMode ' + mode + ' nicht implementiert')
            self.log.debug('\tMode ' + mode + ' eingestellt')
    if self.mode == 2:
        # Cut out the two OFDM symbols and transform to the frequency
        # domain (DC bin centered via fftshift).
        symbol_1 = numpy.fft.fftshift(numpy.fft.fft(
            input_items[0][0:1024]))
        symbol_2 = numpy.fft.fftshift(
            numpy.fft.fft(input_items[0][1024:2048]))
        # Coarse frequency error estimation: normalized cross-correlation
        # of the pilot bins for each candidate offset k.
        freq_off = numpy.zeros([self.freq_range * 2 + 1], 'complex')
        for k in range(-self.freq_range, self.freq_range + 1):
            norm_a = 0
            norm_b = 0
            for s in self.ofdm_pilots:
                # 512 is the center (DC) bin of the shifted 1024-FFT.
                freq_off[k + self.freq_range] += numpy.multiply(
                    symbol_1[512 + s + k], numpy.conj(symbol_2[512 + s + k]))
                norm_a += numpy.multiply(symbol_1[512 + s + k],
                                         numpy.conj(symbol_1[512 + s + k]))
                norm_b += numpy.multiply(symbol_2[512 + s + k],
                                         numpy.conj(symbol_2[512 + s + k]))
            freq_off[k + self.freq_range] = freq_off[
                k + self.freq_range] / numpy.sqrt(norm_a * norm_b)
        freq_off = numpy.abs(freq_off)
        # Candidate with the largest correlation magnitude wins.
        freq_off_est = numpy.argmax(freq_off) - self.freq_range
        if self.enable_integration:
            # Integrate the estimator over time (running mean).
            # Only possible if delta F is constant.
            self.n_estimations += 1.0
            N = self.n_estimations
            self.estimation_k = self.estimation_k * (N - 1) / N + (
                freq_off_est / N)
            # The estimator variance is assumed to be known; it should
            # rather be over- than underestimated.
            self.confidence = self.estim_var_start / N
            freq_off_est = int(numpy.round(self.estimation_k))
        # Correct the frequency error: in the frequency domain this is
        # a simple (circular) bin shift.
        symbol_1 = self.shift_symbol(symbol_1, freq_off_est)
        symbol_2 = self.shift_symbol(symbol_2, freq_off_est)
        self.ffactor = 1
        output_items[0][0:1024] = symbol_1
        output_items[0][1024:2048] = symbol_2
        self.items_consumed = 2048
        self.items_created = 2048
        # 46.875 converts bins to Hz — presumably sample_rate/fft_len;
        # TODO confirm against the flowgraph configuration.
        self.log.debug('\tFreq Offset: ' + str(freq_off_est * 46.875))
        self.log.info('\tGrober Freq Offset: ' +
                      str(freq_off_est * 46.875))
        self.log.debug('\tFreq Sync erfolgreich\n')
    self.consume(0, self.items_consumed)
    return self.items_created
def message_handler(self, msg): if not pmt.is_dict(msg): return try: # this will fail if message is a PDU with non-PMT_NIL arguments n = pmt.length(pmt.dict_items(msg)) # a PDU with one element equal to PMT_NIL still looks like a # dictionary...grrrrr! if (n == 1) and (pmt.equal(pmt.car(msg), pmt.PMT_NIL) or pmt.equal(pmt.cdr(msg), pmt.PMT_NIL)): # treat as a pdu car = pmt.car(msg) cdr = pmt.cdr(msg) else: car = msg cdr = pmt.init_u8vector(0, []) except: try: # message is a pdu pmt.length(pmt.dict_items(pmt.car(msg))) car = pmt.car(msg) cdr = pmt.cdr(msg) except: return if self.find_metadata: keys = pmt.dict_keys(car) self.header = [(pmt.nth(i, keys), pmt.symbol_to_string) for i in range(pmt.length(keys))] header = ','.join([ pmt.symbol_to_string(pmt.nth(i, keys)) for i in range(pmt.length(keys)) ]) if self.fid: self.fid.write(header + '\n') # ensure we no longer search for metadata self.find_metadata = False if self.fid: # add metadata if self.add_metadata: self.print_metadata(car) # cdr must be a uniform vector type if not pmt.is_uniform_vector(cdr): self.fid.write('\n') return # add data values = self.data_type_mappings[self.data_type](cdr) if (self.precision > 0) and (self.data_type in [ 'float', 'double', 'complex float', 'complex double' ]): self.fid.write(','.join( ['{:.{n}f}'.format(i, n=self.precision) for i in values])) else: self.fid.write(','.join([str(i) for i in values])) self.fid.write('\n')
def handle_msg(self, msg_pmt): # msg = pmt.cdr(msg_pmt) # msg_str = "".join([chr(x) for x in pmt.u8vector_elements(msg)]) msg_str = pmt.symbol_to_string(msg_pmt) print msg_pmt print msg_str
def work(self, input_items, output_items):
    """Main loop of the FHSS MAC block.

    Per call: (1) on first entry, tune source and sink to the initial
    hop frequency; (2) drain the message queue, dispatching outgoing
    packets from the higher layer and incoming packets from the
    deframer; (3) track wall-clock time from rx_time/rx_rate stream
    tags; (4) once the tune time is reached, run neighbor discovery /
    synchronization and the per-hop state machine.

    NOTE(review): Python-2 code (print statements); constants such as
    RX_INIT, CTRL_PORT, OUTGOING_PKT_PORT are defined elsewhere in the
    file — not visible here.
    """
    if self.rx_state == RX_INIT:
        # Initial tune of both USRP source and sink to the first hop
        # frequency, via control messages.
        for usrp in ['uhd_source', 'uhd_sink']:
            self.post_msg(
                CTRL_PORT,
                pmt.string_to_symbol(usrp + '.set_center_freq'),
                pmt.from_python(((self.freq_list[self.hop_index], ), {})),
                pmt.string_to_symbol('fhss'))
        #print "DEBUG: Set frequency"
        self.rx_state = RX_SEARCH

    #check for msg inputs when work function is called
    if self.check_msg_queue():
        try:
            msg = self.pop_msg_queue()
        # NOTE(review): bare except — also catches SystemExit and
        # KeyboardInterrupt; consider narrowing.
        except:
            return -1

        # Check for pkts from higher layer (pkts to transmit)
        if msg.offset == OUTGOING_PKT_PORT:
            # First payload byte is the destination address.
            dst = int(pmt.blob_data(msg.value).tostring()[0])
            if dst > self.max_neighbors:
                print "ERROR: DST-adr > number of channels!"
            elif self.neighbors[dst - 1] and dst != self.own_adr:
                self.dst_adr = dst
                self.queue.put(
                    msg)  # if outgoing, put in queue for processing
            else:
                print "ERROR: DST Node not in known neighborhood or own adr!"

        # Check for received pkts from deframer
        elif msg.offset == INCOMING_PKT_PORT:
            # Packet header layout: [type, src, dst, ...]
            pkt = pmt.blob_data(msg.value)
            pkt_type, pkt_src, pkt_dst = pkt[0:3]
            # Dispatch table: packet type -> handler method.
            handle_pkts = {
                HAS_DATA[0]: self.received_data,
                IS_RTS[0]: self.received_rts,
                IS_CTS[0]: self.received_cts,
                IS_BCN[0]: self.received_bcn
            }
            #print "DEBUG: MSG from ", pkt[1], " - to ", pkt[2], " type: ", pkt[0]
            # Only handle packets not sent by us and addressed to us
            # (or broadcast).
            if pkt_src != self.own_adr and pkt_dst in [
                    self.own_adr, self.bcst_adr
            ]:
                try:
                    handle_pkts[pkt_type](pkt)
                except KeyError:
                    print "ERROR: Wrong packet type detected!"
            #else:
            #    print "Not addressed to this station - adr to: ", pkt[2]

    nread = self.nitems_read(0)  # number of items read on port 0
    ninput_items = len(input_items[0])

    if not self.know_time:
        print "Waiting for time..."
        #process streaming samples and tags here
        #read all tags associated with port 0 for items
        tags = self.get_tags_in_range(0, nread, nread + ninput_items)
        #find all of our tags, making the adjustments to our timing
        for tag in tags:
            key_string = pmt.symbol_to_string(tag.key)
            if key_string == "rx_time":
                # rx_time is an (integer seconds, fractional seconds)
                # pair from the USRP.
                self.current_integer, self.current_fractional = pmt.to_python(
                    tag.value)
                self.time_update = self.current_integer + self.current_fractional
                self.found_time = True
                print repr(self.time_update)
            elif key_string == "rx_rate":
                self.rate = pmt.to_python(tag.value)
                self.sample_period = 1.0 / self.rate
                self.found_rate = True
        # Both time and rate are required before timing is trusted.
        if self.found_time and self.found_rate:
            self.know_time = True
    else:
        #get/update current time
        self.time_update += (self.sample_period * ninput_items)
        #print "DEBUG: time_update:", self.time_update, " - input_items:", ninput_items, " - samp-period", self.sample_period

    # Set first tuning time 20 sec in future (hope that we receive beacon
    # pkg within this time for sync -> assume that we're the only node if not)
    if self.time_tune_start == 0:
        print "Searching for neighbors..."
        self.interval_start = self.time_update + self.discovery_time
        self.time_tune_start = self.interval_start - (10 * self.post_guard)

    #determine if it's time for us to start tx'ing, start process
    #10 * self.post_guard before our slot actually begins (deal with latency)
    if self.time_update > self.time_tune_start:
        # Check for neighbors -> get free address
        if not self.discovery_finished:
            self.discovery_finished = True
            # First unused neighbor slot determines our own address.
            i = 0
            while self.neighbors[i]:
                i += 1
            self.own_adr = i + 1
            print "Set own address to:", self.own_adr
            if self.own_adr != 1:
                # Wait another 20 sec for synchronization
                print "Waiting for synchronization..."
                self.interval_start = self.time_update + self.sync_time
            else:
                self.antenna_start = self.interval_start + self.pre_guard
                self.hop()
        # TODO: MOve most of the following stuff before
        # time_tune_start!
        # Per-hop state machine dispatch.
        handle_state = {
            IDLE: self.idle,
            GOT_RTS: self.got_rts,
            GOT_CTS: self.got_cts,
            WAITING_FOR_CTS: self.waiting_for_cts,
            WAITING_FOR_DATA: self.waiting_for_data
        }
        handle_state[self.state]()
        self.hops_to_beacon -= 1
        # Schedule the next hop interval.
        self.interval_start += self.hop_interval
        self.time_tune_start = self.interval_start - (10 * self.post_guard)
        #print "Next Hop: ", int(math.floor(self.interval_start)), " - ", self.interval_start % 1, " ----- INDEX: ", self.hop_index

    return ninput_items
help='remote port') args = parser.parse_args() # Socket to talk to server context = zmq.Context() socket = context.socket(zmq.SUB) print 'Collecting updates from radio server at {} port {}...'.format( args.server, args.port) socket.connect('tcp://{}:{}'.format(args.server, args.port)) socket.setsockopt(zmq.SUBSCRIBE, '') data = RBDSData() try: while True: gnr_message_pmt = pmt.deserialize_str(socket.recv()) if pmt.is_tuple(gnr_message_pmt): msg_type = pmt.to_long(pmt.tuple_ref(gnr_message_pmt, 0)) msg = pmt.symbol_to_string(pmt.tuple_ref(gnr_message_pmt, 1)) data.update(msg_type, msg) print ansi_erase_display(2) + repr(data) + ansi_move_to(1, 1) else: print 'Encountered Data I Did Not Understand' except KeyboardInterrupt: print ansi_erase_display(2) + ansi_move_to( 1, 1) + "Shutdown requested...exiting" except Exception: traceback.print_exc(file=sys.stdout) sys.exit(0)
def test_002_simpledfe(self):
    """ Use the simple DFE equalizer.

    Builds a 4-symbol QPSK OFDM frame, passes it through a known
    (slightly rotated) channel, equalizes it and checks that both the
    decided data and the propagated channel-taps tag are correct.
    """
    fft_len = 8
    #           4   5  6  7   0  1  2   3
    tx_data = [
        -1, -1, 1, 2, -1, 3, 0, -1,  # 0
        -1, -1, 0, 2, -1, 2, 0, -1,  # 8
        -1, -1, 3, 0, -1, 1, 0, -1,  # 16 (Pilot symbols)
        -1, -1, 1, 1, -1, 0, 2, -1
    ]  # 24
    cnst = digital.constellation_qpsk()
    # -1 marks unused carriers (transmitted as 0).
    tx_signal = [
        cnst.map_to_points_v(x)[0] if x != -1 else 0 for x in tx_data
    ]
    occupied_carriers = ((1, 2, 6, 7), )
    pilot_carriers = ((), (), (1, 2, 6, 7), ())
    pilot_symbols = ([], [], [cnst.map_to_points_v(x)[0]
                              for x in (1, 0, 3, 0)], [])
    equalizer = digital.ofdm_equalizer_simpledfe(fft_len, cnst.base(),
                                                 occupied_carriers,
                                                 pilot_carriers,
                                                 pilot_symbols, 0, 0.01)
    channel = [
        0, 0, 1, 1, 0, 1, 1, 0,
        0, 0, 1, 1, 0, 1, 1, 0,
        # These coefficients will be rotated slightly...
        0, 0, 1j, 1j, 0, 1j, 1j, 0,  # Go crazy here!
        0, 0, 1j, 1j, 0, 1j, 1j, 0  # ...and again here.
    ]
    for idx in range(fft_len, 2 * fft_len):
        channel[idx] = channel[idx - fft_len] * numpy.exp(
            1j * .1 * numpy.pi * (numpy.random.rand() - .5))
        idx2 = idx + 2 * fft_len
        # NOTE(review): the rotation angle here is multiplied by 0, so
        # this line is a no-op — possibly a leftover; confirm intent.
        channel[idx2] = channel[idx2] * numpy.exp(
            1j * 0 * numpy.pi * (numpy.random.rand() - .5))
    # Attach the initial channel estimate as a tag on the first item.
    chan_tag = gr.tag_t()
    chan_tag.offset = 0
    chan_tag.key = pmt.string_to_symbol("ofdm_sync_chan_taps")
    chan_tag.value = pmt.init_c32vector(fft_len, channel[:fft_len])
    src = blocks.vector_source_c(numpy.multiply(tx_signal, channel),
                                 False, fft_len, (chan_tag, ))
    eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), 0,
                                           self.tsb_key, True)
    sink = blocks.tsb_vector_sink_c(fft_len, tsb_key=self.tsb_key)
    self.tb.connect(
        src,
        # Use integer division: len(tx_data) / fft_len is a float under
        # Python 3 and stream_to_tagged_stream needs an int packet length.
        blocks.stream_to_tagged_stream(gr.sizeof_gr_complex, fft_len,
                                       len(tx_data) // fft_len,
                                       self.tsb_key),
        eq, sink)
    self.tb.run()
    rx_data = [
        cnst.decision_maker_v((x, )) if x != 0 else -1
        for x in sink.data()[0]
    ]
    # Equalized data must decide back to the transmitted symbols.
    self.assertEqual(tx_data, rx_data)
    # Exactly one tag is propagated: the final channel estimate.
    self.assertEqual(len(sink.tags()), 1)
    tag = sink.tags()[0]
    self.assertEqual(pmt.symbol_to_string(tag.key), "ofdm_sync_chan_taps")
    self.assertComplexTuplesAlmostEqual(list(
        pmt.c32vector_elements(tag.value)),
                                        channel[-fft_len:],
                                        places=1)