def test_003_12bits_formatter_object(self):
    """12-bit default header formatter: one header per length-tagged PDU."""
    # Stream of 10 items split into PDUs of length 4, 2 and 4.
    data = (1, 2, 3, 4, 1, 2, 1, 2, 3, 4)
    tagname = "packet_len"
    tags = []
    for tag_offset, pdu_len in ((0, 4), (4, 2), (6, 4)):
        tag = gr.tag_t()
        tag.offset = tag_offset
        tag.key = pmt.string_to_symbol(tagname)
        tag.value = pmt.from_long(pdu_len)
        tags.append(tag)
    src = blocks.vector_source_b(data, False, 1, tuple(tags))
    formatter_object = digital.packet_header_default(12, tagname)
    header = digital.packet_headergenerator_bb(
        formatter_object.formatter(), tagname)
    sink = blocks.vector_sink_b()
    self.tb.connect(src, header, sink)
    self.tb.run()
    # Three 12-bit headers, one per PDU.
    expected_data = (0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                     0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                     0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0)
    self.assertEqual(sink.data(), expected_data)
def test_003(self):
    """gr.tag_t ordering helpers: sorted/min/max with tag_t_offset_compare_key."""
    offsets = (6, 3, 8)
    key = pmt.string_to_symbol('key')
    srcid = pmt.string_to_symbol('qa_tag_utils')

    def make_tag(offset):
        # Tag value mirrors its offset so both can be checked together.
        tag = gr.tag_t()
        tag.offset = offset
        tag.key = key
        tag.value = pmt.from_long(offset)
        tag.srcid = srcid
        return tag

    def check(tag, offset):
        self.assertEqual(tag.offset, offset)
        self.assertTrue(pmt.equal(tag.key, key))
        self.assertTrue(pmt.equal(tag.value, pmt.from_long(offset)))
        self.assertTrue(pmt.equal(tag.srcid, srcid))

    tags = [make_tag(k) for k in offsets]
    # sorted() over tags must agree with sorted() over the raw offsets.
    for k, t in zip(sorted(offsets),
                    sorted(tags, key=gr.tag_t_offset_compare_key())):
        check(t, k)
    check(min(tags, key=gr.tag_t_offset_compare_key()), min(offsets))
    check(max(tags, key=gr.tag_t_offset_compare_key()), max(offsets))
def test_004_8bits_formatter_ofdm(self):
    """OFDM header formatter with a 6-carrier allocation: 6-bit headers."""
    occupied_carriers = ((1, 2, 3, 5, 6, 7), )
    # Stream of 10 items split into PDUs of length 4, 2 and 4.
    data = (1, 2, 3, 4, 1, 2, 1, 2, 3, 4)
    tagname = "packet_len"
    tags = []
    for tag_offset, pdu_len in ((0, 4), (4, 2), (6, 4)):
        tag = gr.tag_t()
        tag.offset = tag_offset
        tag.key = pmt.string_to_symbol(tagname)
        tag.value = pmt.from_long(pdu_len)
        tags.append(tag)
    src = blocks.vector_source_b(data, False, 1, tuple(tags))
    formatter_object = digital.packet_header_ofdm(occupied_carriers, 1,
                                                  tagname)
    # Six occupied carriers and one symbol -> a 6-bit header.
    self.assertEqual(formatter_object.header_len(), 6)
    self.assertEqual(
        pmt.symbol_to_string(formatter_object.len_tag_key()), tagname)
    header = digital.packet_headergenerator_bb(
        formatter_object.formatter(), tagname)
    sink = blocks.vector_sink_b()
    self.tb.connect(src, header, sink)
    self.tb.run()
    expected_data = (0, 0, 1, 0, 0, 0,
                     0, 1, 0, 0, 0, 0,
                     0, 0, 1, 0, 0, 0)
    self.assertEqual(sink.data(), expected_data)
def test_002_tags_plus_data(self):
    """Tags inside the first packet must surface as PDU metadata."""
    packet_len = 16
    src_data = range(packet_len)
    tags = []
    # Offsets must lie within the first packet_len items.
    for tag_offset, name, val in ((0, 'spam', 23), (10, 'eggs', 42)):
        tag = gr.tag_t()
        tag.offset = tag_offset
        tag.key = pmt.string_to_symbol(name)
        tag.value = pmt.from_long(val)
        tags.append(tag)
    src = blocks.vector_source_f(src_data, tags=tuple(tags))
    s2ts = blocks.stream_to_tagged_stream(gr.sizeof_float, vlen=1,
                                          packet_len=packet_len,
                                          len_tag_key="packet_len")
    ts2pdu = blocks.tagged_stream_to_pdu(blocks.float_t, "packet_len")
    dbg = blocks.message_debug()
    self.tb.connect(src, s2ts, ts2pdu)
    self.tb.msg_connect(ts2pdu, "pdus", dbg, "store")
    self.tb.start()
    # Poll until the first PDU has arrived, then shut down.
    while dbg.num_messages() < 1:
        time.sleep(0.1)
    self.tb.stop()
    self.tb.wait()
    result_msg = dbg.get_message(0)
    metadata = pmt.to_python(pmt.car(result_msg))
    vector = pmt.f32vector_elements(pmt.cdr(result_msg))
    self.assertEqual(metadata, {'eggs': 42, 'spam': 23})
    self.assertFloatTuplesAlmostEqual(tuple(vector), src_data)
def test_002_32bits(self):
    """32-bit default header: symbol count, packet number, parity bits."""
    # Stream of 10 items split into PDUs of length 4, 2 and 4.
    data = (1, 2, 3, 4, 1, 2, 1, 2, 3, 4)
    tagname = "packet_len"
    tags = []
    for tag_offset, pdu_len in ((0, 4), (4, 2), (6, 4)):
        tag = gr.tag_t()
        tag.offset = tag_offset
        tag.key = pmt.string_to_symbol(tagname)
        tag.value = pmt.from_long(pdu_len)
        tags.append(tag)
    src = blocks.vector_source_b(data, False, 1, tuple(tags))
    header = digital.packet_headergenerator_bb(32, tagname)
    sink = blocks.vector_sink_b()
    self.tb.connect(src, header, sink)
    self.tb.run()
    expected_data = (
        # | Number of symbols | Packet number | Parity
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0,
        0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
    )
    self.assertEqual(sink.data(), expected_data)
def create_frame(config, tag_key):
    """Build one GFDM frame: preamble followed by a cyclic-prefixed data block.

    Returns (frame samples, raw QPSK symbols, stream tag). The tag's value is
    a PMT dict with fixed xcorr/sync test metadata keyed under `tag_key`.
    """
    symbols = get_random_qpsk(config.timeslots * config.active_subcarriers)
    d_block = modulate_mapped_gfdm_block(
        symbols,
        config.timeslots,
        config.subcarriers,
        config.active_subcarriers,
        2,
        0.2,
        dc_free=True,
    )
    preamble = config.full_preambles[0]
    frame = add_cyclic_starfix(d_block, config.cp_len, config.cs_len)
    frame = np.concatenate((preamble, frame))
    tag = gr.tag_t()
    tag.key = pmt.string_to_symbol(tag_key)
    # Fixed dummy metadata values (42 / 4711 / unit rotation) for QA use.
    d = pmt.make_dict()
    d = pmt.dict_add(d, pmt.mp("xcorr_idx"), pmt.from_uint64(42))
    d = pmt.dict_add(d, pmt.mp("xcorr_offset"), pmt.from_uint64(4711))
    d = pmt.dict_add(d, pmt.mp("sc_rot"), pmt.from_complex(1.0 + 0.0j))
    # tag.offset = data.size + cp_len
    # NOTE(review): tag.offset is deliberately left unset (see the commented
    # line above) — confirm downstream consumers do not rely on it.
    tag.srcid = pmt.string_to_symbol("qa")
    tag.value = d
    return frame, symbols, tag
def work(self, input_items, output_items):
    """Three-state machine: wait for an "fcch" tag, advance the stream to the
    predicted SCH burst position, then tag the SCH and restart.

    Returns the number of items to consume (this block controls its own
    consumption to align the SCH burst with the start of the buffer).
    """
    in0 = input_items[0]
    out = output_items[0]
    # Default: consume everything except the history samples.
    to_consume = len(in0)-self.history()
    if self.state == self.states["waiting_for_fcch_tag"]:
        fcch_tags = []
        start = self.nitems_written(0)
        stop = start + len(in0)
        key = pmt.string_to_symbol("fcch")
        fcch_tags = self.get_tags_in_range(0, start, stop, key)
        if fcch_tags:
            # Predict the SCH burst position relative to the FCCH tag.
            self.sch_offset = fcch_tags[0].offset + int(round(8*self.burst_size+0*self.guard_period)) #156.25 is number of GMSK symbols per timeslot,
            #8.25 is arbitrary safety margin in order to avoid cutting boundary of SCH burst
            self.state = self.states["reaching_sch_burst"]
    elif self.state == self.states["reaching_sch_burst"]:
        samples_left = self.sch_offset-self.nitems_written(0)
        if samples_left <= len(in0)-self.history():
            # Consume only up to the SCH position so the burst lands at the
            # start of the next input buffer.
            to_consume = samples_left
            self.state = self.states["sch_at_input_buffer"]
    elif self.state == self.states["sch_at_input_buffer"]:
        # SCH burst now sits at the start of in0: tag it and estimate the
        # channel impulse response from it.
        offset = self.nitems_written(0)
        key = pmt.string_to_symbol("sch")
        value = pmt.from_double(0)
        self.add_item_tag(0,offset, key, value)
        self.state = self.states["waiting_for_fcch_tag"]
        self.sch_receiver.get_chan_imp_resp(in0[0:self.block_size+self.guard_period])
        # plot(unwrap(angle(in0[0:2*self.block_size])))
        # show()
    out[:] = in0[self.history()-1:]
    return to_consume
def test_additive_scrambler_tags(self):
    """Scrambler/descrambler pair with LFSR-reset tags must round-trip data.

    Fix: the original first built an untagged source and a tag-less
    scrambler/descrambler pair and then immediately overwrote all three
    variables — that dead code is removed.
    """
    src_data = (1, ) * 1000
    reset_tag_key = 'reset_lfsr'
    # Reset the LFSR at three arbitrary stream offsets.
    reset_tags = []
    for tag_offset in (17, 110, 523):
        tag = gr.tag_t()
        tag.key = pmt.string_to_symbol(reset_tag_key)
        tag.offset = tag_offset
        reset_tags.append(tag)
    src = blocks.vector_source_b(src_data, False, 1, tuple(reset_tags))
    scrambler = digital.additive_scrambler_bb(0x8a, 0x7f, 7, 100, 1,
                                              reset_tag_key)
    descrambler = digital.additive_scrambler_bb(0x8a, 0x7f, 7, 100, 1,
                                                reset_tag_key)
    dst = blocks.vector_sink_b()
    self.tb.connect(src, scrambler, descrambler, dst)
    self.tb.run()
    # Scramble + descramble with synchronized resets is the identity.
    self.assertEqual(src_data, dst.data())
def msg_handler(self, p):
    """Rebuild an incoming list of (key, value) PMT pairs, converting
    "power" entries to dB, and publish the result on the "out" port.
    """
    length = pmt.length(p)
    for i in range(0,length):
        element = pmt.nth(i, p)
        key = pmt.nth(0, element)
        value = pmt.nth(1, element)
        if str(key) == "power":
            # 10*log10 of the first f32 element -> new "<key>_dB" entry.
            output = pmt.f32vector_elements(value)[0]
            output = 10 * math.log(output, 10)
            output = pmt.make_f32vector(1, output)
            if i==0:
                # NOTE(review): when i == 0 this list1 is unconditionally
                # overwritten by the pass-through branch below, so the
                # "power_dB" entry of the first element is lost — confirm
                # whether that is intended.
                outpmt = pmt.list1(pmt.list2(pmt.string_to_symbol(str(key) + "_dB"), output))
            else:
                outpmt = pmt.list_add(outpmt, pmt.list2(pmt.string_to_symbol(str(key) + "_dB"), output))
        # Always pass the original (key, value) pair through as well.
        output = pmt.nth(1, element)
        if i==0:
            outpmt = pmt.list1(pmt.list2(key, output))
        else:
            outpmt = pmt.list_add(outpmt, pmt.list2(key, output))
    self.message_port_pub(pmt.string_to_symbol("out"), outpmt)
def test_002_32bits(self):
    """32-bit default header: symbol count, packet number and CRC."""
    # Stream of 10 items split into PDUs of length 4, 2 and 4.
    data = (1, 2, 3, 4, 1, 2, 1, 2, 3, 4)
    tagname = "packet_len"
    tags = []
    for tag_offset, pdu_len in ((0, 4), (4, 2), (6, 4)):
        tag = gr.tag_t()
        tag.offset = tag_offset
        tag.key = pmt.string_to_symbol(tagname)
        tag.value = pmt.from_long(pdu_len)
        tags.append(tag)
    src = blocks.vector_source_b(data, False, 1, tuple(tags))
    header = digital.packet_headergenerator_bb(32, tagname)
    sink = blocks.vector_sink_b()
    self.tb.connect(src, header, sink)
    self.tb.run()
    expected_data = (
        # | Number of symbols | Packet number | CRC
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1,
        0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1,
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1
    )
    self.assertEqual(sink.data(), expected_data)
def test_001_t (self):
    """Strobe (name, type, value) PMT triples into sql.msg_to_table.

    NOTE: the flowgraph setup is disabled via `if 0:` below, so this test
    currently only starts and stops an empty top block.
    """
    # set up message: each entry is a (name, type, value) triple
    msg1 = pmt.list3(pmt.string_to_symbol('id'),pmt.string_to_symbol('int'),pmt.from_long(42))
    msg2 = pmt.list3(pmt.string_to_symbol('value'),pmt.string_to_symbol('float'),pmt.from_float(3.1416))
    #msg3 = pmt.list3(pmt.string_to_symbol('text'),pmt.string_to_symbol('string'),pmt.string_to_symbol('some text'))
    msg = pmt.list2(msg1,msg2)
    # set up sql connection
    host = '127.0.0.1'
    port = 0 # default
    user = '******'
    password = '******'
    database = 'my_db'
    table = 'my_table'
    # set up flowgraph
    if 0: # Enable and disable here
        msg_src = blocks.message_strobe(msg,100)
        sql_connector = sql.msg_to_table(user,password,database,table,host,port)
        self.tb.msg_connect(msg_src,'strobe',sql_connector,'Msg in')
    # run flowgraph briefly
    self.tb.start()
    sleep(0.2)
    self.tb.stop()
    self.tb.wait()
def dice_csi_tags(self, data, type, num_inputs, num_tags, tag_pos, vlen=1):
    """Generate random CSI stream tags and the matching expected equalizer output.

    data: 2-D array of received samples — assumes shape
          (num_inputs, stream_length); TODO confirm against callers.
    type: equalizer variant; 'MMSE' additionally prepends a huge-SNR tag
          (1e8) at offset 0 so MMSE degenerates to zero-forcing.
    Returns (tags, expected_result) where expected_result applies the
    inverted channel from each tag position to the end of the stream.
    """
    tags = []
    expected_result = np.empty([np.size(data, 0)*np.size(data,1)], dtype=complex)
    if type == 'MMSE':
        # Add an SNR tag at the start of the stream for MMSE.
        tags.append(gr.tag_utils.python_to_tag((0, pmt.string_to_symbol("snr"), pmt.make_f32vector(num_inputs, 1e8), pmt.from_long(0))))
    for i in range(0, num_tags):
        # Randomly generate CSI for one symbol:
        # shape (vlen, num_inputs, num_inputs) complex channel matrices.
        csi = (np.random.randn(vlen, num_inputs, num_inputs) + 1j * np.random.randn(vlen, num_inputs, num_inputs))
        # Assign the CSI to a nested PMT structure:
        # vector (carriers) of vectors (rx) of c32vectors (tx).
        csi_pmt = pmt.make_vector(vlen, pmt.make_vector(num_inputs, pmt.make_c32vector(num_inputs, 1.0)))
        for k, carrier in enumerate(csi):
            carrier_vector_pmt = pmt.make_vector(num_inputs, pmt.make_c32vector(num_inputs, csi[k][0][0]))
            for l, rx in enumerate(csi[k]):
                line_vector_pmt = pmt.make_c32vector(num_inputs, csi[k][l][0])
                for m, tx in enumerate(csi[k][l]):
                    pmt.c32vector_set(v=line_vector_pmt, k=m, x=csi[k][l][m])
                pmt.vector_set(carrier_vector_pmt, l, line_vector_pmt)
            pmt.vector_set(csi_pmt, k, carrier_vector_pmt)
        # Append stream tags with CSI to data stream.
        tags.append(gr.tag_utils.python_to_tag((tag_pos[i], pmt.string_to_symbol("csi"), csi_pmt, pmt.from_long(0))))
        # Calculate expected result: invert the channel and apply it to all
        # data from this tag position onwards (later tags overwrite the tail).
        expected_result[tag_pos[i]*num_inputs::] = np.reshape(np.transpose(np.dot(np.linalg.inv(csi), data[::, tag_pos[i]::])), (np.size(data, 0)*(np.size(data,1)-tag_pos[i])))
    return tags, expected_result
def tx_data(self):
    """Pop one message from the input queue and hand it to the framer.

    Oversized messages are reported and dropped (they have already been
    removed from the queue).

    Fix: Python-2 `print` statement replaced with the print() function,
    consistent with the Python-3 syntax used elsewhere in this file.
    """
    #TODO: Enable multi-hop transmissions -> less overhead!
    msg = self.queue.get()
    msg_byte_count = len(pmt.blob_data(msg.value)) + self.overhead
    if msg_byte_count >= self.bytes_per_slot:
        print("ERROR: Message too long!")
    else:
        #self.got_cts = False
        self.state = IDLE
        # Transmit time as (whole seconds, fractional seconds).
        time_object = int(math.floor(self.antenna_start)), (self.antenna_start % 1)
        more_frames = 0
        # Frame layout: type | own address | destination address | payload.
        data = numpy.concatenate([HAS_DATA,
                                  self._to_byte_array(self.own_adr),
                                  self._to_byte_array(self.dst_adr),
                                  pmt.blob_data(msg.value)])
        tx_object = time_object, data, more_frames
        self.post_msg(TO_FRAMER_PORT, pmt.string_to_symbol('full'),
                      pmt.from_python(tx_object),
                      pmt.string_to_symbol('tdma'))
def general_work(self, input_items, output_items):
    """Copy input to output while alternately inserting start/end burst tags.

    While `self.forward` is set, a start tag is placed every start_sample()
    items; otherwise an end tag every end_sample() items. `self.offset`
    tracks the absolute item position of the next tag.
    """
    in_buf = input_items[0]
    out_buf = output_items[0]
    n_in = len(in_buf)
    n_out = len(out_buf)
    burster_src = pmt.string_to_symbol('burster')
    for idx in range(n_out):
        if self.forward:
            if idx % self.start_sample() == 0:
                self.add_item_tag(0, self.offset,
                                  pmt.string_to_symbol(self.start_tag()),
                                  pmt.PMT_T, burster_src)
                self.offset += self.start_sample()
                self.forward = False
        elif idx % self.end_sample() == 0:
            self.add_item_tag(0, self.offset,
                              pmt.string_to_symbol(self.end_tag()),
                              pmt.PMT_T, burster_src)
            self.forward = True
            self.offset += self.end_sample()
    n_copy = min(n_in, n_out)
    out_buf[:n_copy] = in_buf[:n_copy]
    self.consume(0, n_copy)
    return n_out
def general_work(self, input_items, output_items):
    """Detect frame starts via normalized correlation against a reference
    pattern; once a frame is detected, pass 16 OFDM symbols downstream.

    (Comments translated from German.)
    """
    # output_items[0][:] = input_items[0]
    # self.consume(0, len(input_items[0]))
    # Wait for frame start, then forward data.
    # To work mode-independently, the references would have to be stored as
    # multi-dimensional arrays and the mode determined from the tag.
    corr_val = 0
    norm_a = 0
    norm_b = 0
    norm = 0
    max_corr = 0
    corr_val = 0
    norm_a = 0
    norm_b = 0
    norm = 0
    # Normalized cross-correlation of the input (offset by 512 samples)
    # against the stored frame reference cells.
    for s in range(0, len(self.frame_ref_n)):
        corr_val += numpy.multiply(
            input_items[0][512 + self.frame_ref_n[s]],
            numpy.conj(self.frame_ref[s]))
        norm_a += numpy.conj(self.frame_ref[s]) * self.frame_ref[s]
        norm_b += numpy.conj(input_items[0][512 + self.frame_ref_n[s]]
                             ) * input_items[0][512 + self.frame_ref_n[s]]
    norm = numpy.sqrt(norm_a * norm_b)
    max_corr = corr_val / norm
    # Alternative: estimate using the signal energy.
    # avg_amp = 0
    # for s in self.frame_ref_n:
    #     avg_amp += numpy.square(numpy.abs(input_items[0][512+s]))
    # print('Power: ' + str(avg_amp / len(self.frame_ref_n)))
    # Print the correlation values to help tune the threshold.
    if self.print_values:
        print('Frame Korrelationsmaximum : ' + str(numpy.abs(max_corr)))
    # Count symbols since the last frame start.
    self.symbl_cnt += 1
    # A new frame is detected when the correlation exceeds the threshold.
    if max_corr > self.corr_limit:
        self.frame_cnt += 1
        self.symbl_cnt = 0
        # Tag the frame start with its running frame number.
        key = pmt.string_to_symbol("FRAME")
        value = pmt.string_to_symbol(str(self.frame_cnt))
        self.add_item_tag(0, self.nitems_written(0), key, value)
        self.initial_frame_detected = 1
    # Once a frame has been detected, forward the OFDM symbols.
    # NOTE(review): statement grouping reconstructed from collapsed source —
    # confirm consume/return are outside the conditional as written here.
    if self.initial_frame_detected != 0 and self.symbl_cnt <= 16:
        output_items[0][0:1024] = input_items[0][0:1024]
        self.items_created = 1024
    self.consume(0, 1024)
    return self.items_created
def tx_signaling(self, max_delay_in_slot, msg_type, dst_adr): """ Send signaling/control frames (no data). """ # Send after random amount of time in this bin/slot/hop delay = random.uniform(0, max_delay_in_slot) ant_start = self.antenna_start + delay time_msg = self._time_to_msg(self.interval_start) next_hop_index = numpy.array([self.hop_index], dtype='uint8') time_object = int(math.floor(ant_start)), (ant_start % 1) #print "DEBUG: Sending %s at %s" % (msg_type, time_object) #print "-----fre_list %s - hop-index %s" % (self.freq_list, self.hop_index) #print self.freq_msg # Create msg and add to tx_queue before calling transmit data = numpy.concatenate([ msg_type, self._to_byte_array(self.own_adr), self._to_byte_array(dst_adr), time_msg, next_hop_index ]) more_frames = 0 tx_object = time_object, data, more_frames self.post_msg(TO_FRAMER_PORT, pmt.string_to_symbol('full'), pmt.from_python(tx_object), pmt.string_to_symbol('tdma'))
def work(self, input_items, output_items):
    """Copy the stream through; if a "tx_eob" tag appears in this buffer,
    re-emit it on the buffer's last item instead of its original position.
    """
    # State is reset every call: we only track whether THIS buffer had an EOB.
    self.state = SEARCH_EOB
    out = output_items[0]
    in0 = input_items[0]
    out[:] = in0[:] #memcpy
    nread = self.nitems_read(0) #number of items read on port 0
    ninput_items = len(in0)
    #read all tags associated with port 0 for items in this work function
    tags = self.get_tags_in_range(0, nread, nread + ninput_items)
    num_items = min(len(in0), len(out))
    for tag in tags:
        if tag.key == pmt.string_to_symbol("tx_eob"):
            # Remember that an EOB occurred; it is re-added at the end below.
            self.state = FOUND_EOB
        else:
            # Forward all other tags unchanged, re-sourced as "len_tagger".
            self.add_item_tag(0, tag.offset, tag.key, tag.value, pmt.string_to_symbol("len_tagger"))
    if self.state == FOUND_EOB:
        item_index = num_items #which output item gets the tag?
        offset = self.nitems_written(0) + item_index
        key = pmt.string_to_symbol("tx_eob")
        source = pmt.string_to_symbol("")
        # Place the EOB on the last item written in this call.
        self.add_item_tag(0, offset - 1, key, pmt.PMT_T, source)
    return len(out)
def tx_data(self):
    """Take one queued message and post it to the framer as a data frame.

    Fix: Python-2 `print` statement replaced with the print() function,
    consistent with the Python-3 syntax used elsewhere in this file.
    """
    #TODO: Enable multi-hop transmissions -> less overhead!
    msg = self.queue.get()
    msg_byte_count = len(pmt.blob_data(msg.value)) + self.overhead
    if msg_byte_count >= self.bytes_per_slot:
        # Message does not fit into one slot; report and drop it.
        print("ERROR: Message too long!")
    else:
        #self.got_cts = False
        self.state = IDLE
        # Transmit time as (whole seconds, fractional seconds).
        time_object = int(math.floor(
            self.antenna_start)), (self.antenna_start % 1)
        more_frames = 0
        # Frame layout: type | own address | destination address | payload.
        data = numpy.concatenate([
            HAS_DATA,
            self._to_byte_array(self.own_adr),
            self._to_byte_array(self.dst_adr),
            pmt.blob_data(msg.value)
        ])
        tx_object = time_object, data, more_frames
        self.post_msg(TO_FRAMER_PORT, pmt.string_to_symbol('full'),
                      pmt.from_python(tx_object),
                      pmt.string_to_symbol('tdma'))
def test_004_8bits_formatter_ofdm (self):
    """OFDM header formatter, 6 occupied carriers: expect 6-bit headers."""
    occupied_carriers = ((1, 2, 3, 5, 6, 7),)
    # 10-item stream carrying PDUs of length 4, 2 and 4.
    data = (1, 2, 3, 4, 1, 2, 1, 2, 3, 4)
    tagname = "packet_len"

    def length_tag(tag_offset, pdu_len):
        t = gr.tag_t()
        t.offset = tag_offset
        t.key = pmt.string_to_symbol(tagname)
        t.value = pmt.from_long(pdu_len)
        return t

    tags = (length_tag(0, 4), length_tag(4, 2), length_tag(6, 4))
    src = blocks.vector_source_b(data, False, 1, tags)
    formatter_object = digital.packet_header_ofdm(occupied_carriers, 1,
                                                  tagname)
    self.assertEqual(formatter_object.header_len(), 6)
    self.assertEqual(pmt.symbol_to_string(formatter_object.len_tag_key()),
                     tagname)
    header = digital.packet_headergenerator_bb(formatter_object.formatter(),
                                               tagname)
    sink = blocks.vector_sink_b()
    self.tb.connect(src, header, sink)
    self.tb.run()
    expected_data = (
        0, 0, 1, 0, 0, 0,
        0, 1, 0, 0, 0, 0,
        0, 0, 1, 0, 0, 0
    )
    self.assertEqual(sink.data(), expected_data)
def generate_tag(tag_key, srcid, value, offset):
    """Build a gr.tag_t with symbol key/srcid, long value and given offset."""
    new_tag = gr.tag_t()
    new_tag.offset = offset
    new_tag.srcid = pmt.string_to_symbol(srcid)
    new_tag.key = pmt.string_to_symbol(tag_key)
    new_tag.value = pmt.from_long(value)
    return new_tag
def test_001b_simple_skip_nothing(self):
    """ Same as before, but put a skip-header in there """
    fft_len = 8
    equalizer = digital.ofdm_equalizer_static(fft_len, symbols_skipped=1)
    n_syms = 3
    len_tag_key = "frame_len"
    tx_data = (1, ) * fft_len * n_syms

    def tag_at_zero(key_str, value_pmt):
        # Both tags sit on the first item of the frame.
        t = gr.tag_t()
        t.offset = 0
        t.key = pmt.string_to_symbol(key_str)
        t.value = value_pmt
        return t

    len_tag = tag_at_zero(len_tag_key, pmt.from_long(n_syms))
    # Flat all-ones channel taps -> equalization is a no-op.
    chan_tag = tag_at_zero("ofdm_sync_chan_taps",
                           pmt.init_c32vector(fft_len, (1, ) * fft_len))
    src = blocks.vector_source_c(tx_data, False, fft_len,
                                 (len_tag, chan_tag))
    eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), 0, len_tag_key)
    sink = blocks.vector_sink_c(fft_len)
    self.tb.connect(src, eq, sink)
    self.tb.run()
    # Check data
    self.assertEqual(tx_data, sink.data())
def test_003_tags_plus_data(self):
    """Tags inside the first packet must surface as PDU metadata."""
    packet_len = 16
    src_data = list(range(packet_len))
    tags = []
    # Offsets must lie within the first packet_len items.
    for tag_offset, name, val in ((0, 'spam', 23), (10, 'eggs', 42)):
        tag = gr.tag_t()
        tag.offset = tag_offset
        tag.key = pmt.string_to_symbol(name)
        tag.value = pmt.from_long(val)
        tags.append(tag)
    src = blocks.vector_source_f(src_data, tags=tuple(tags))
    s2ts = blocks.stream_to_tagged_stream(gr.sizeof_float, vlen=1,
                                          packet_len=packet_len,
                                          len_tag_key="packet_len")
    ts2pdu = pdu.tagged_stream_to_pdu(gr.types.float_t, "packet_len")
    dbg = blocks.message_debug()
    self.tb.connect(src, s2ts, ts2pdu)
    self.tb.msg_connect(ts2pdu, "pdus", dbg, "store")
    self.tb.start()
    self.tb.wait()
    result_msg = dbg.get_message(0)
    metadata = pmt.to_python(pmt.car(result_msg))
    vector = pmt.f32vector_elements(pmt.cdr(result_msg))
    self.assertEqual(metadata, {'eggs': 42, 'spam': 23})
    self.assertFloatTuplesAlmostEqual(tuple(vector), src_data)
def work(self, input_items, output_items):
    """Pass the stream through; wrap every length-tagged burst in
    start-of-burst / end-of-burst tags.

    Fix: Python-2 `print` statements replaced with print() calls; removed
    the unused local `len_t`.
    """
    in0 = input_items[0]
    out = output_items[0]
    out[:] = in0[:]
    nread = self.nitems_read(0)  # number of items read on port 0
    ninput_items = len(in0)
    sob_t = pmt.string_to_symbol(self.start_tag)
    eob_t = pmt.string_to_symbol(self.end_tag)
    value = pmt.from_bool(True)
    source = pmt.string_to_symbol("uhd_tags")
    tags = self.get_tags_in_range(0, nread, nread + ninput_items)
    for tag in tags:
        if str(tag.key) == self.len_tag:
            if DEBUG:
                print("Found burst start at offset : " + str(tag.offset) +
                      " with len : " + str(tag.value))
                print(" -> Injecting tag " + self.start_tag + " @" +
                      str(tag.offset))
                print(" -> Injecting tag " + self.end_tag + " @" +
                      str(tag.offset + pmt.to_long(tag.value) - 1))
            # SOB on the first item of the burst, EOB on its last item.
            self.add_item_tag(0, tag.offset, sob_t, value, source)
            self.add_item_tag(0, tag.offset + pmt.to_long(tag.value) - 1,
                              eob_t, value, source)
    return len(out)
def __init__(self, period, serial_port, degrees_per_trigger, stop):
    """Turntable controller: message-only block driving a serial turntable.

    period: stored control period.
    serial_port: device path; empty string disables the serial connection.
    degrees_per_trigger: rotation step per trigger message.
    stop: stored stop parameter.

    Fix: Python-2 `print` statement replaced with the print() function.
    """
    gr.basic_block.__init__(self,
                            name="turntable",
                            in_sig=[],   # message-only block:
                            out_sig=[])  # no stream inputs or outputs
    # register message ports
    self.message_port_register_in(pmt.string_to_symbol("in"))
    self.message_port_register_out(pmt.string_to_symbol("out"))
    self.set_msg_handler(pmt.string_to_symbol("in"), self.msg_handler)
    self.d_period = period
    self.d_serial_port = serial_port
    self.d_degrees_per_trigger = degrees_per_trigger
    self.d_stop = stop
    # accumulated angle
    self.angle = 0
    # count of messages received
    self.counter = 0
    print("Opening serial port: " + self.d_serial_port)
    if self.d_serial_port != "":
        self.ttctrl = control.control(self.d_serial_port)
        self.ttctrl.open()
def test_with_tags_2s_rolloff(self):
    """ With tags and a 2-sample rolloff

    Fix: wrap range() in list() so the sequence can be repeated with `*`
    under Python 3 (range objects do not support multiplication).
    """
    fft_len = 8
    cp_len = 2
    tag_name = "length"
    expected_result = (7.0/2, 8, 1, 2, 3, 4, 5, 6, 7, 8, # 1.0/2
                       7.0/2+1.0/2, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1.0/2)
    # Length tag covering both symbols, plus an unrelated tag to propagate.
    tag = gr.tag_t()
    tag.offset = 0
    tag.key = pmt.string_to_symbol(tag_name)
    tag.value = pmt.from_long(2)
    tag2 = gr.tag_t()
    tag2.offset = 1
    tag2.key = pmt.string_to_symbol("random_tag")
    tag2.value = pmt.from_long(42)
    src = blocks.vector_source_c(list(range(1, fft_len + 1)) * 2, False,
                                 fft_len, (tag, tag2))
    cp = digital.ofdm_cyclic_prefixer(fft_len, fft_len + cp_len, 2, tag_name)
    sink = blocks.vector_sink_c()
    self.tb.connect(src, cp, sink)
    self.tb.run()
    self.assertEqual(sink.data(), expected_result)
    tags = [gr.tag_to_python(x) for x in sink.tags()]
    tags = sorted([(x.offset, x.key, x.value) for x in tags])
    expected_tags = [
        (0, tag_name, len(expected_result)),
        (fft_len+cp_len, "random_tag", 42)
    ]
    self.assertEqual(tags, expected_tags)
def __init__(self, key, filename):
    """Message sink that records incoming messages to a file.

    key: "all", or a collection of field names to record (used below to
         emit a CSV header line).
    filename: output file path; "" is treated as file output disabled.
    """
    gr.basic_block.__init__(self,
                            name="message_print",
                            in_sig=[],
                            out_sig=[])
    self.key = key
    self.filename=filename
    self.counter=0
    #delete the contents of the file
    # NOTE(review): this open() runs even when filename == "", which would
    # raise — confirm callers always pass a non-empty path.
    self.fdout = open(filename, "w")
    self.fdout.close()
    if key != "all":
        if filename != "":
            # Write a CSV header line with one column per entry of `key`.
            # NOTE(review): if `key` is a plain string this iterates its
            # characters; presumably a list/tuple of names is expected —
            # verify against callers.
            self.fdout = open(filename, "w")
            for i in self.key:
                self.fdout.write(i + ",")
            self.fdout.write("\n")
            self.fdout.close()
    #register message ports
    self.message_port_register_in(pmt.string_to_symbol("in"))
    self.set_msg_handler(pmt.string_to_symbol("in"), self.msg_handler)
def test_005_packet_len_tag (self):
    """ Standard test

    Fixes: removed a dead `tx_symbols = range(1, 16)` that was immediately
    overwritten, and used floor division for the symbol count so that an
    int (not a Python-3 float) is passed to pmt.from_long.
    """
    fft_len = 16
    tx_symbols = (0, 1, 1j, 2, 3, 0, 0, 0, 0, 0, 0, 4, 5, 2j, 6, 0,
                  0, 7, 8, 3j, 9, 0, 0, 0, 0, 0, 0, 10, 4j, 11, 12, 0,
                  0, 13, 1j, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 0, 0)
    expected_result = tuple(range(1, 16))
    occupied_carriers = ((1, 3, 4, 11, 12, 14), (1, 2, 4, 11, 13, 14),)
    n_syms = len(tx_symbols) // fft_len
    tag_name = "len"
    # Symbol-count tag and byte-level packet-length tag, both at offset 0.
    tag = gr.tag_t()
    tag.offset = 0
    tag.key = pmt.string_to_symbol(tag_name)
    tag.value = pmt.from_long(n_syms)
    tag2 = gr.tag_t()
    tag2.offset = 0
    tag2.key = pmt.string_to_symbol("packet_len")
    tag2.value = pmt.from_long(len(expected_result))
    src = blocks.vector_source_c(tx_symbols, False, fft_len, (tag, tag2))
    serializer = digital.ofdm_serializer_vcc(fft_len, occupied_carriers,
                                             tag_name, "packet_len",
                                             0, "", False)
    sink = blocks.vector_sink_c()
    self.tb.connect(src, serializer, sink)
    self.tb.run ()
    self.assertEqual(sink.data(), expected_result)
    # Only the packet_len tag survives serialization.
    self.assertEqual(len(sink.tags()), 1)
    result_tag = sink.tags()[0]
    self.assertEqual(pmt.symbol_to_string(result_tag.key), "packet_len")
    self.assertEqual(pmt.to_long(result_tag.value), len(expected_result))
def test_001_t(self):
    """Strobe typed (name, type, value) triples into msg_to_table (disabled)."""
    def triple(name, type_name, value_pmt):
        # Each entry is a (name, type, value) PMT triple.
        return pmt.list3(pmt.string_to_symbol(name),
                         pmt.string_to_symbol(type_name),
                         value_pmt)

    #msg3 = pmt.list3(pmt.string_to_symbol('text'),pmt.string_to_symbol('string'),pmt.string_to_symbol('some text'))
    msg = pmt.list2(triple('id', 'int', pmt.from_long(42)),
                    triple('value', 'float', pmt.from_float(3.1416)))
    # SQL connection parameters
    host = '127.0.0.1'
    port = 0  # default
    user = '******'
    password = '******'
    database = 'my_db'
    table = 'my_table'
    # Flowgraph setup (currently disabled)
    if 0:  # Enable and disable here
        msg_src = blocks.message_strobe(msg, 100)
        sql_connector = sql.msg_to_table(user, password, database, table,
                                         host, port)
        self.tb.msg_connect(msg_src, 'strobe', sql_connector, 'Msg in')
    # Run the flowgraph briefly.
    self.tb.start()
    sleep(0.2)
    self.tb.stop()
    self.tb.wait()
def tx_signaling(self, max_delay_in_slot, msg_type, dst_adr):
    """ Send signaling/control frames (no data). """
    # Randomize the transmit instant within the current bin/slot/hop.
    ant_start = self.antenna_start + random.uniform(0, max_delay_in_slot)
    time_object = int(math.floor(ant_start)), (ant_start % 1)
    # Frame layout: type | own address | destination | slot time | next hop.
    payload_parts = [
        msg_type,
        self._to_byte_array(self.own_adr),
        self._to_byte_array(dst_adr),
        self._time_to_msg(self.interval_start),
        numpy.array([self.hop_index], dtype='uint8'),
    ]
    data = numpy.concatenate(payload_parts)
    more_frames = 0  # no further frames follow this one
    tx_object = time_object, data, more_frames
    self.post_msg(TO_FRAMER_PORT,
                  pmt.string_to_symbol('full'),
                  pmt.from_python(tx_object),
                  pmt.string_to_symbol('tdma'))
def test_003_12bits_formatter_object (self):
    """12-bit default header formatter: one header per length-tagged PDU."""
    # 10-item stream carrying PDUs of length 4, 2 and 4.
    data = (1, 2, 3, 4, 1, 2, 1, 2, 3, 4)
    tagname = "packet_len"

    def length_tag(tag_offset, pdu_len):
        t = gr.tag_t()
        t.offset = tag_offset
        t.key = pmt.string_to_symbol(tagname)
        t.value = pmt.from_long(pdu_len)
        return t

    tags = (length_tag(0, 4), length_tag(4, 2), length_tag(6, 4))
    src = blocks.vector_source_b(data, False, 1, tags)
    formatter_object = digital.packet_header_default(12, tagname)
    header = digital.packet_headergenerator_bb(formatter_object.formatter(),
                                               tagname)
    sink = blocks.vector_sink_b()
    self.tb.connect(src, header, sink)
    self.tb.run()
    expected_data = (
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0
    )
    self.assertEqual(sink.data(), expected_data)
def test_005_packet_len_tag(self):
    """ Standard test

    Fix: use floor division for the symbol count — under Python 3 the
    original `/` produced a float, which is not a valid pmt.from_long input.
    """
    fft_len = 16
    tx_symbols = range(1, 16)
    tx_symbols = (0, 1, 1j, 2, 3, 0, 0, 0, 0, 0, 0, 4, 5, 2j, 6, 0, 0, 7,
                  8, 3j, 9, 0, 0, 0, 0, 0, 0, 10, 4j, 11, 12, 0, 0, 13,
                  1j, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 0, 0)
    expected_result = tuple(range(1, 16))
    occupied_carriers = (
        (1, 3, 4, 11, 12, 14),
        (1, 2, 4, 11, 13, 14),
    )
    n_syms = len(tx_symbols) // fft_len
    tag_name = "len"
    # Symbol-count tag and byte-level packet-length tag, both at offset 0.
    tag = gr.tag_t()
    tag.offset = 0
    tag.key = pmt.string_to_symbol(tag_name)
    tag.value = pmt.from_long(n_syms)
    tag2 = gr.tag_t()
    tag2.offset = 0
    tag2.key = pmt.string_to_symbol("packet_len")
    tag2.value = pmt.from_long(len(expected_result))
    src = blocks.vector_source_c(tx_symbols, False, fft_len, (tag, tag2))
    serializer = digital.ofdm_serializer_vcc(fft_len, occupied_carriers,
                                             tag_name, "packet_len", 0, "",
                                             False)
    sink = blocks.vector_sink_c()
    self.tb.connect(src, serializer, sink)
    self.tb.run()
    self.assertEqual(sink.data(), expected_result)
    # Only the packet_len tag survives serialization.
    self.assertEqual(len(sink.tags()), 1)
    result_tag = sink.tags()[0]
    self.assertEqual(pmt.symbol_to_string(result_tag.key), "packet_len")
    self.assertEqual(pmt.to_long(result_tag.value), len(expected_result))
def work(self, input_items, output_items):
    """Pass the stream through; wrap every "pdu_length"-tagged burst in
    tx_sob/tx_eob tags for the UHD sink.

    Fix: Python-2 `print` statements replaced with print() calls; removed
    the unused locals `pan` and `lng`.
    """
    in0 = input_items[0]
    out = output_items[0]
    out[:] = in0[:]
    nread = self.nitems_read(0)  # number of items read on port 0
    ninput_items = len(in0)
    eob = pmt.string_to_symbol("tx_eob")
    sob = pmt.string_to_symbol("tx_sob")
    value = pmt.from_bool(1)
    source = pmt.string_to_symbol("add_uhd_tag")
    tags = self.get_tags_in_range(0, nread, nread + ninput_items)
    print("total input items : " + str(ninput_items))
    for tag in tags:
        if str(tag.key) == "pdu_length":
            # SOB on the first item of the burst, EOB on its last item.
            self.add_item_tag(0, tag.offset, sob, value, source)
            self.add_item_tag(0, tag.offset + pmt.to_long(tag.value) - 1,
                              eob, value, source)
    return len(out)
def work(self, input_items, output_items):
    """Accumulate 65-cell FAC blocks across calls, decode each complete block
    and publish spectrum-occupancy changes on the 'fac_out' message port.

    Pipeline per block: QAM slicing -> bit de-interleaving -> Viterbi
    decoding -> energy-dispersal descrambling -> CRC check.
    """
    # Wait until a whole FAC block (65 cells) is available.
    len_recvd = self.fac_recvd + len(input_items[0])
    n_fac = int(numpy.floor(len_recvd / 65))
    #print('FAC Symbols: ' + str(len(input_items[0])) + ' n=' + str(n_fac) )
    for n in range(0, n_fac):
        # Top up the partially-filled block from the new input.
        # NOTE(review): the slice input_items[0][0:missing_bits] is reused for
        # every iteration, so for n_fac > 1 only the first block per call is
        # read from the right position — confirm upstream chunk sizes make
        # n_fac > 1 impossible.
        missing_bits = 65 - self.fac_recvd
        self.fac_block[self.fac_recvd:65] = input_items[0][0:missing_bits]
        #print( self.fac_block )
        # Demodulation
        bits = fac_dec.qam_slicer(self.fac_block)
        # Bit deinterleaving
        bits = fac_dec.bit_deinterleaving(bits)
        # Viterbi decoding
        bits = fac_dec.dec_reform(bits)
        decoder = fac_dec.viterbi()
        bits = decoder.decode(bits)
        # Energy dispersal (descrambling)
        bits = fac_dec.scrambler(bits[0:72])
        # Invert the CRC bits (they are transmitted inverted).
        #crc = bits[64:72]
        crc = [int((x + 1) % 2) for x in bits[64:]]
        bits = [int(x) for x in bits[0:64]]
        if fac_dec.crc_check(bits[0:64] + crc):
            self.fac_decoder.init(bits[0:64])
            #self.fac_decoder.print_fac()
            # Publish a message only when the spectrum occupancy changes.
            if self.spectrum_occupancy != self.fac_decoder.spectrum_occupancy:
                self.spectrum_occupancy = self.fac_decoder.spectrum_occupancy
                msg = pmt.cons(
                    pmt.string_to_symbol('SO'),
                    pmt.string_to_symbol(str(self.spectrum_occupancy)))
                self.message_port_pub(pmt.intern('fac_out'), msg)
        else:
            print(' FAC CRC Fehler! ')
        # Block finished; reset the fill counter.
        self.fac_recvd = 0
        len_recvd -= 65
        #print('FAC Decoded!')
    # Stash any leftover cells for the next call.
    if len_recvd > 0:
        self.fac_block[0:len_recvd] = input_items[0][n_fac * 65:]
    return len(input_items[0])
def test_with_tags_2s_rolloff_multiples_cps(self):
    "Two CP lengths, 2-sample rolloff and tags."
    fft_len = 8
    cp_lengths = (3, 2, 2)
    rolloff = 2
    tag_name = "ts_last"
    # The rolloff mixes each symbol's tail into the next symbol's head.
    expected_result = [
        6.0 / 2, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8,  # 1
        7.0 / 2 + 1.0 / 2, 8, 1, 2, 3, 4, 5, 6, 7, 8,
        1.0 / 2  # Last tail
    ]
    # Two unrelated test tags that must survive the prefixer.
    test_tags = []
    for tag_offset, tag_key, tag_val in ((0, "first_tag", 24),
                                         (1, "second_tag", 42)):
        t = gr.tag_t()
        t.offset = tag_offset
        t.key = pmt.string_to_symbol(tag_key)
        t.value = pmt.from_long(tag_val)
        test_tags.append(t)
    src = blocks.vector_source_c(
        list(range(1, fft_len + 1)) * 2, False, fft_len, tuple(test_tags))
    cp = digital.ofdm_cyclic_prefixer(fft_len, cp_lengths, rolloff, tag_name)
    sink = blocks.tsb_vector_sink_c(tsb_key=tag_name)
    tagger = blocks.stream_to_tagged_stream(
        gr.sizeof_gr_complex, fft_len, 2, tag_name)
    self.tb.connect(src, tagger, cp, sink)
    self.tb.run()
    self.assertEqual(sink.data()[0], expected_result)
    received = sorted(
        (t.offset, t.key, t.value)
        for t in (gr.tag_to_python(x) for x in sink.tags()))
    self.assertEqual(received,
                     [(0, "first_tag", 24),
                      (fft_len + cp_lengths[0], "second_tag", 42)])
def __init__(self):
    """Message-only block named "power_to_db" with 'in'/'out' ports."""
    gr.basic_block.__init__(self, name="power_to_db", in_sig=[], out_sig=[])
    # Register the message ports and bind the input handler.
    port_in = pmt.string_to_symbol("in")
    port_out = pmt.string_to_symbol("out")
    self.message_port_register_in(port_in)
    self.message_port_register_out(port_out)
    self.set_msg_handler(port_in, self.msg_handler)
def work(self, input_items, output_items):
    """Pass samples through and report gaps in a UHD sample stream.

    On the first call that carries tags, latch the stream's sample rate
    ("rx_rate") and the initial "rx_time"/offset pair.  On later calls,
    compare the item-offset distance between successive "rx_time" tags
    with the distance implied by their timestamps; the surplus is a run
    of missing samples (a gap), which is printed.
    """
    in0 = input_items[0]
    out = output_items[0]
    if self.got_fist_tag is not True:
        # Startup: wait for the first rx_rate/rx_time tag pair.
        rx_rate_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_rate"))
        rx_time_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_time"))
        if len(rx_time_tags) > 0:
            self.got_fist_tag = True
            self.rx_rate = pmt.to_double(rx_rate_tags[0].value)
            self.offset_prev = rx_time_tags[0].offset
            # rx_time is a (uint64 seconds, double fraction) PMT tuple.
            self.rx_time_prev_secs = pmt.to_uint64(
                pmt.tuple_ref(rx_time_tags[0].value, 0))
            self.rx_time_prev_frac = pmt.to_double(
                pmt.tuple_ref(rx_time_tags[0].value, 1))
            if len(rx_time_tags) > 1:
                print "Usupported situation - more than one tag in a single work(..) call"
    else:
        rx_time_tags = self.get_tags_in_window(
            0, 0, len(in0), pmt.string_to_symbol("rx_time"))
        if len(rx_time_tags) > 0:
            tt = rx_time_tags[0]
            self.rx_time_secs = pmt.to_uint64(pmt.tuple_ref(tt.value, 0))
            self.rx_time_frac = pmt.to_double(pmt.tuple_ref(tt.value, 1))
            self.offset = tt.offset
            # Items actually delivered between the two tags...
            diff_offset = self.offset - self.offset_prev
            # ...vs. items that should have arrived in the elapsed time.
            diff_offset_real = (
                (self.rx_time_secs - self.rx_time_prev_secs) +
                (self.rx_time_frac - self.rx_time_prev_frac)) * self.rx_rate
            # Number of samples missing from the stream (the gap size).
            zeros = diff_offset_real - diff_offset
            print "Found a gap in the data at offset:", self.offset, " with length:", zeros, " [samps]"
            # Save state for the next gap measurement.
            self.offset_prev = self.offset
            self.rx_time_prev_secs = self.rx_time_secs
            self.rx_time_prev_frac = self.rx_time_frac
            if len(rx_time_tags) > 1:
                print "Usupported situation - more than one tag in a single work(..) call"
    out[:] = in0
    return len(output_items[0])
def received_bcn(self, pkt):
    """Called if a BCN (beacon) packet was received.

    Records the sender in the neighbor table, reports the neighbor set
    to the higher layer and, for beacons from higher-priority
    (lower-address) nodes, synchronizes hop timing.  Print statements
    use Python 3 call syntax (was Python 2).
    """
    # Sync to beacon if pkt is from node with higher prio!
    # Add node to neighborhood table.
    self.bcn_rx_no += 1
    bcn_src = int(pkt[1])
    if not self.neighbors[bcn_src - 1]:
        self.neighbors[bcn_src - 1] = True
        print("Node", bcn_src, "detected!")
    # TODO: DEMO-STUFF!
    # ---> Tell higher layer which nodes we've found (routing).
    # Message bytes: "kh:" then '+'/'-' per node, terminated by '\n'.
    known_hosts_msg = [107, 104, 58]
    for node in self.neighbors:
        if node is True:
            known_hosts_msg.append(43)
        else:
            known_hosts_msg.append(45)
    known_hosts_msg.append(10)
    blob = self.mgr.acquire(True)  # block
    pmt.blob_resize(blob, len(known_hosts_msg))
    pmt.blob_rw_data(blob)[:] = known_hosts_msg
    self.post_msg(APP_PORT, pmt.string_to_symbol('rx'), blob,
                  pmt.string_to_symbol('fhss'))
    # Synchronization: only sync once, to a lower-address node, after
    # discovery has finished.
    if (pkt[1] < self.own_adr and self.discovery_finished
            and not self.synced):
        # Next interval starts at the beacon's fractional send time
        # plus two hop intervals.
        self.interval_start = int(math.floor(self.time_update)) + (
            self._msg_to_time(pkt[3:11])[0] % 1) + (2 * self.hop_interval)
        # Send tune command before the USRP has to tune.
        self.time_tune_start = self.interval_start - (10 * self.post_guard)
        # Continue the hop sequence one step past the beacon's index.
        self.hop_index = (pkt[11] + 1) % self.freq_list_length
        if self.hops_to_beacon != 0:
            self.hops_to_beacon -= 1
        if not self.synced:
            print("SYNCED!")
            self.synced = True
def received_bcn(self, pkt): """ Called if a BCN packet was received """ # Sync to beacon if pkt is from node with higher prio! # Add Node to neighborhood table self.bcn_rx_no += 1 #print "DEBUG: BCN received no.", self.bcn_rx_no bcn_src = int(pkt[1]) if not self.neighbors[bcn_src - 1]: self.neighbors[bcn_src - 1] = True print "Node", bcn_src, "detected!" # TODO: DEMO-STUFF! # ---> Tell higher layer which nodes we've found (routing) known_hosts_msg = [107, 104, 58] for node in self.neighbors: if node is True: known_hosts_msg.append(43) else: known_hosts_msg.append(45) known_hosts_msg.append(10) blob = self.mgr.acquire(True) # block pmt.blob_resize(blob, len(known_hosts_msg)) pmt.blob_rw_data(blob)[:] = known_hosts_msg self.post_msg(APP_PORT, pmt.string_to_symbol('rx'), blob, pmt.string_to_symbol('fhss')) # Synchronization if (pkt[1] < self.own_adr and self.discovery_finished and not self.synced): self.interval_start = int(math.floor(self.time_update)) + ( self._msg_to_time(pkt[3:11])[0] % 1) + (2 * self.hop_interval) #self.interval_start = self.time_update + (2 * self.hop_interval) #DEBUG print "BCN sent at", repr(self._msg_to_time(pkt[3:11])[0]), " time now", self.time_update #DEBUG print "interval start", self.interval_start # TODO: This is for DEBUGGING ONLY! #while self.interval_start > (self.time_update + 1) and self.interval_start < (self.time_update + 2): # print "+++Interval-Start increased!" # self.interval_start += 1 #while self.interval_start > (self.time_update - 1) and self.interval_start < (self.time_update): # self.interval_start -= 1 # print "---Interval-Start decreased!" # Send tune command before the USRP has to tune self.time_tune_start = self.interval_start - (10 * self.post_guard) self.hop_index = (pkt[11] + 1) % self.freq_list_length if self.hops_to_beacon != 0: self.hops_to_beacon -= 1 if not self.synced: print "SYNCED!" self.synced = True
def handler(self, msg):
    """Parse an AX.25 frame from a PDU and republish it.

    Emits the original PDU on "out", a Python-dict string on "dict",
    and a human-readable summary on "str".  Frames that fail to parse
    are reported and dropped.
    """
    value = pmt.cdr(msg)
    if not pmt.is_u8vector(value):
        return
    octets = pmt.to_python(value)
    packet = {}
    try:
        packet = Ax25_bytesToPacket(octets)
        self.recvCnt += 1
    except Exception as e:
        print("Error parsing AX.25: %s" % str(e))
        return
    # Create JSON string.
    pktStr = str(packet)
    pktMsg = pmt.cons(pmt.PMT_NIL, pmt.string_to_symbol(pktStr))
    # Create pretty string.  Default src/dest to empty dicts so a frame
    # without those fields doesn't raise AttributeError on .get() below
    # (the old default was the *string* "None", which has no .get()).
    src = packet.get("src", {})
    dst = packet.get("dest", {})
    reps = packet.get("repeaters", [])
    ctrl = packet.get("ctrl", {})
    ptype = ctrl.get("type", "")
    proto = packet.get("proto", 0)
    info = "".join([chr(ci) for ci in packet.get("info", [])])
    repStr = ""
    for rep in reps:
        repStr += "{}{} (ssid={})\n".format(
            " " * 13, str(rep.get("callsign", "None")),
            rep.get("ssid", -1))
    # String format.
    pktPretty = ("------------------------------\n"
                 "Source: {} (ssid={})\n"
                 "Destination: {} (ssid={})\n"
                 "Type: {} (proto={})\n"
                 "Repeaters:\n"
                 "{}"
                 "Info:\n"
                 "{}\n").format(str(src.get("callsign", "None")),
                                src.get("ssid", -1),
                                str(dst.get("callsign", "None")),
                                dst.get("ssid", -1),
                                ptype, proto, repStr, info)
    pktPrettyMsg = pmt.cons(pmt.PMT_NIL, pmt.string_to_symbol(pktPretty))
    print(pktStr)
    self.message_port_pub(pmt.intern("out"), msg)
    self.message_port_pub(pmt.intern("dict"), pktMsg)
    self.message_port_pub(pmt.intern("str"), pktPrettyMsg)
def general_work(self, input_items, output_items): self.generated_samples = 0 # Mode Selection if(self.mode == 0): self.mode_selection(input_items[0][0:1280+1280]) else: # Zeit Synchronisation self.time_sync( input_items[0][0:self.sym_len[self.mode]*2-1], 0, 1280 ) # Feine Frequenzsynchronisation if self.symbol_found == 1: if self.enable_integration: # Schätzer über Zeit integrireren # Nur möglich, wenn delta F konstant self.n_estimations_f += 1 N = self.n_estimations_f self.estimation_f = self.estimation_f * ( N - 1 ) / N + (self.fine_freq_off/N) # Die Varianz des Schätzers wird als bekannt vorausgesetzt, bzw. sollte lieber über als unterschätzt werden self.confidence_f = self.estim_var_f / N # Schätzer über Zeit integrireren # Nur möglich, wenn delta F konstant self.n_estimations_t += 1 N = self.n_estimations_t self.estimation_t = self.estimation_t * ( N - 1 ) / N + (self.symbol_start/N) # Die Varianz des Schätzers wird als bekannt vorausgesetzt, bzw. sollte lieber über als unterschätzt werden self.confidence_t = self.estim_var_t / N self.fine_freq_off = self.estimation_f self.symbol_start = int(numpy.round(self.estimation_t)) # Frequenzkorrektur output_items[0][0:self.generated_samples] = self.mixer(input_items[0][self.symbol_start:self.symbol_start + self.generated_samples ], -1*self.fine_freq_off, self.fs) # Wird der Symbolanfang direkt auf null gesetzt, wird bei der korrelation teilweise erst das 2. symbol erkannt self.symbol_start_offset = 256 - self.symbol_start self.estimation_t += self.symbol_start_offset # Tag hinzufügen if self.new_mode_detected == 1: key = pmt.string_to_symbol("MODE") value = pmt.string_to_symbol(self.modes[self.mode]) self.add_item_tag(0, self.nitems_written(0), key, value) self.new_mode_detected = 0 self.consume(0,self.sym_len[self.mode] - self.symbol_start_offset) self.symbol_start_offset = 0 return self.generated_samples
def work(self, input_items, output_items):
    """Produce output and tag the middle item of every buffer.

    Each call attaches an "example_key"/"example_value" tag to the
    item halfway through the output buffer.
    """
    noutput = len(output_items[0])
    # Absolute index of the middle output item of this call.
    middle = self.nitems_written(0) + noutput // 2
    self.add_item_tag(0, middle,
                      pmt.string_to_symbol("example_key"),
                      pmt.string_to_symbol("example_value"))
    return noutput
def __init__(self, val):
    """Message-only "calibrate" block.

    Keeps a value window [val, val + 2*pi) — presumably for phase
    unwrapping in msg_handler; confirm against the handler code.
    """
    gr.basic_block.__init__(self, name="calibrate", in_sig=[], out_sig=[])
    self.val = val
    # Window of width one full turn, starting at val.
    self.minimum = val
    self.maximum = val + 2 * numpy.pi
    # Register message ports and bind the input handler.
    in_port = pmt.string_to_symbol("in")
    self.message_port_register_in(in_port)
    self.message_port_register_out(pmt.string_to_symbol("out"))
    self.set_msg_handler(in_port, self.msg_handler)
    # Handler state.
    self.last_val = 0
    self.subtract = 0
def work(self, input_items, output_items):
    """Produce output and tag the middle item of every buffer.

    Each call attaches an "example_key"/"example_value" tag to the
    item halfway through the output buffer.
    """
    num_output_items = len(output_items[0])
    # Floor division: under Python 3, "/" yields a float, which is not
    # a valid (integer) tag offset for add_item_tag.
    count = self.nitems_written(0) + num_output_items // 2
    key = pmt.string_to_symbol("example_key")
    value = pmt.string_to_symbol("example_value")
    self.add_item_tag(0, count, key, value)
    return num_output_items
def test_003_t(self):
    """ more advanced:
    - 6 symbols per carrier
    - 2 pilots per carrier
    - have enough data for nearly 3 OFDM symbols
    - send that twice
    - add some random tags
    - don't shift
    """
    tx_symbols = list(range(1, 16))  # 15 symbols
    pilot_symbols = ((1j, 2j), (3j, 4j))
    occupied_carriers = ((1, 3, 4, 11, 12, 14), (1, 2, 4, 11, 13, 14),)
    pilot_carriers = ((2, 13), (3, 12))
    expected_result = (0, 1, 1j, 2, 3, 0, 0, 0, 0, 0, 0, 4, 5, 2j, 6, 0,
                       0, 7, 8, 3j, 9, 0, 0, 0, 0, 0, 0, 10, 4j, 11, 12, 0,
                       0, 13, 1j, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 0, 0)
    fft_len = 16
    # (offset, key) for each random test tag.
    tag_spec = (
        (0, 'tag1'),                        # Head of packet 1
        (7, 'tag2'),                        # On the 2nd OFDM symbol
        (len(tx_symbols) + 1, 'tag3'),      # First OFDM symbol of packet 2
        (2 * len(tx_symbols) - 1, 'tag4'),  # Last OFDM symbol of packet 2
    )
    test_tags = []
    for tag_offset, tag_key in tag_spec:
        t = gr.tag_t()
        t.offset = tag_offset
        t.key = pmt.string_to_symbol(tag_key)
        t.value = pmt.from_long(0)
        test_tags.append(t)
    src = blocks.vector_source_c(tx_symbols * 2, False, 1,
                                 tuple(test_tags))
    alloc = digital.ofdm_carrier_allocator_cvc(
        fft_len, occupied_carriers, pilot_carriers, pilot_symbols, (),
        self.tsb_key, False)
    sink = blocks.tsb_vector_sink_c(fft_len)
    self.tb.connect(
        src,
        blocks.stream_to_tagged_stream(
            gr.sizeof_gr_complex, 1, len(tx_symbols), self.tsb_key),
        alloc, sink)
    self.tb.run()
    self.assertEqual(sink.data()[0], expected_result)
    # Every test tag must come out, at the expected OFDM-symbol offset.
    correct_offsets = {'tag1': 0, 'tag2': 1, 'tag3': 3, 'tag4': 5}
    tags_found = dict.fromkeys(correct_offsets, False)
    for tag in sink.tags():
        key = pmt.symbol_to_string(tag.key)
        if key in tags_found:
            tags_found[key] = True
            self.assertEqual(correct_offsets[key], tag.offset)
    self.assertTrue(all(tags_found.values()))
def __init__(self, filename, key):
    """Message-only "real_value" block.

    Stores *filename* and *key* for use by msg_handler (their exact
    semantics live in the handler, not shown here) and wires up the
    'in'/'out' message ports.
    """
    gr.basic_block.__init__(self, name="real_value", in_sig=[], out_sig=[])
    self.filename = filename
    self.key = key
    # Register message ports and bind the input handler.
    in_port = pmt.string_to_symbol("in")
    self.message_port_register_in(in_port)
    self.message_port_register_out(pmt.string_to_symbol("out"))
    self.set_msg_handler(in_port, self.msg_handler)
def __init__(self, synctag, plentag, plen, streamtype):
    """Fixed-length packet synchronizer.

    Args:
        synctag: name of the tag that marks a syncword position
        plentag: name of the packet-length tag this block uses
        plen: fixed packet length in items
        streamtype: item type of the input/output streams
    """
    gr.basic_block.__init__(
        self,
        name="fixedlen_packet_synchronizer",
        in_sig=[streamtype],
        out_sig=[streamtype])
    self.syncword_tag = pmt.string_to_symbol(synctag)
    self.packetlen_tag = pmt.string_to_symbol(plentag)
    self.packet_len = plen
    # History window holding up to plen-1 of the most recent items.
    self.stream = collections.deque(maxlen=plen - 1)
    # Bookkeeping for tag handling and produced output.
    self.maxtag = 0
    self.data = []
    self.tags = []
    self.written = 0
def __init__(self, syncword_tag, packetlen_tag, packet_len, stream_type):
    """Fixed-length tagger block.

    Args:
        syncword_tag: name of the tag that marks a syncword position
        packetlen_tag: name of the packet-length tag this block uses
        packet_len: fixed packet length in items
        stream_type: item type of the input/output streams
    """
    gr.basic_block.__init__(
        self,
        name="fixedlen_tagger",
        in_sig=[stream_type],
        out_sig=[stream_type])
    self.syncword_tag = pmt.string_to_symbol(syncword_tag)
    self.packetlen_tag = pmt.string_to_symbol(packetlen_tag)
    self.packet_len = packet_len
    # History window holding up to packet_len-1 of the most recent items.
    self.stream = collections.deque(maxlen=packet_len - 1)
    # Bookkeeping for tag handling and produced output.
    self.maxtag = 0
    self.data = []
    self.tags = []
    self.written = 0
def normal_loop(self, name, id):
    """Decision loop for a normal (non-coordinator) node.

    Selects no MAC protocol at startup (portid 200) and then requests
    metrics every self.metrics_gran seconds, forever.
    """
    global portid
    portid = 200
    logging.info("Decision block as Normal node")
    # Sets no MAC protocol at the beginning (portid = 200, none).
    self.message_port_pub(self.msg_port_ctrl_out,
                          pmt.string_to_symbol('portid' + str(portid)))
    while True:
        time.sleep(self.metrics_gran)
        self.message_port_pub(self.msg_port_metrics_out,
                              pmt.string_to_symbol("send_metrics"))
def test_004_connect (self):
    """ Advanced test:
    - Allocator -> IFFT -> Frequency offset -> FFT -> Serializer
    - FFT does shift (moves DC to middle)
    - Make sure input == output
    - Frequency offset is -2 carriers
    """
    fft_len = 8
    n_syms = 1  # (unused here)
    carr_offset = -2
    freq_offset = 1.0 / fft_len * carr_offset  # Normalized frequency
    occupied_carriers = ((-2, -1, 1, 2),)
    pilot_carriers = ((-3,), (3,))
    pilot_symbols = ((1j,), (-1j,))
    tx_data = (1, 2, 3, 4)
    tag_name = "len"
    # Length tag for the allocator's tagged-stream input.
    tag = gr.tag_t()
    tag.offset = 0
    tag.key = pmt.string_to_symbol(tag_name)
    tag.value = pmt.from_long(len(tx_data))
    # Tell the serializer about the simulated carrier offset so it can
    # undo it.
    offsettag = gr.tag_t()
    offsettag.offset = 0
    offsettag.key = pmt.string_to_symbol("ofdm_sync_carr_offset")
    offsettag.value = pmt.from_long(carr_offset)
    src = blocks.vector_source_c(tx_data, False, 1, (tag, offsettag))
    alloc = digital.ofdm_carrier_allocator_cvc(fft_len,
                                               occupied_carriers,
                                               pilot_carriers,
                                               pilot_symbols, (),
                                               tag_name)
    tx_ifft = fft.fft_vcc(fft_len, False, (1.0 / fft_len,) * fft_len, True)
    # Mixing with this oscillator shifts the signal by carr_offset
    # carriers.
    oscillator = analog.sig_source_c(1.0, analog.GR_COS_WAVE,
                                     freq_offset, 1.0 / fft_len)
    mixer = blocks.multiply_cc()
    rx_fft = fft.fft_vcc(fft_len, True, (), True)
    sink2 = blocks.vector_sink_c(fft_len)
    self.tb.connect(rx_fft, sink2)
    serializer = digital.ofdm_serializer_vcc(
        alloc, "", 0, "ofdm_sync_carr_offset", True
    )
    sink = blocks.vector_sink_c()
    self.tb.connect(
        src, alloc, tx_ifft,
        blocks.vector_to_stream(gr.sizeof_gr_complex, fft_len),
        (mixer, 0),
        blocks.stream_to_vector(gr.sizeof_gr_complex, fft_len),
        rx_fft, serializer, sink
    )
    self.tb.connect(oscillator, (mixer, 1))
    self.tb.run ()
    # The serializer must have undone the carrier offset: out == in.
    self.assertComplexTuplesAlmostEqual(
        sink.data()[-len(occupied_carriers[0]):], tx_data, places=4)
def test_001_t(self):
    """Remove cyclic prefixes from a stream of GFDM frames.

    Builds n_frames preamble+CP framed GFDM blocks, pushes them through
    remove_prefix_cc and checks that exactly the core data blocks come
    out, with no tags propagated (TPP_DONT).
    """
    n_frames = 20
    timeslots = 9
    subcarriers = 128
    active_subcarriers = 110
    cp_len = subcarriers // 2
    smap = get_subcarrier_map(subcarriers, active_subcarriers)
    seed = 4711
    ftype = 'rrc'
    falpha = .5
    tag_key = 'frame_start'
    preamble, x_preamble = mapped_preamble(seed, ftype, falpha,
                                           active_subcarriers,
                                           subcarriers, smap, 2,
                                           cp_len, cp_len // 2)
    block_len = timeslots * subcarriers
    offset = len(preamble) + cp_len
    frame_len = len(preamble) + timeslots * subcarriers + cp_len
    # Use the builtin complex type: the np.complex alias was deprecated
    # in NumPy 1.20 and removed in 1.24.
    data = np.array([], dtype=complex)
    ref = np.array([], dtype=complex)
    tags = []
    # Python 3 print call (was a Python 2 print statement).
    print('frame_len: ', frame_len)
    for i in range(n_frames):
        d_block = modulate_mapped_gfdm_block(
            get_random_qpsk(timeslots * active_subcarriers),
            timeslots, subcarriers, active_subcarriers, 2, falpha)
        frame = pinch_cp_add_block(d_block, timeslots, subcarriers,
                                   cp_len, cp_len // 2)
        frame = np.concatenate((preamble, frame))
        # Reference output is the core block, CP and preamble stripped.
        r = frame[offset:offset + block_len]
        ref = np.concatenate((ref, r))
        tag = gr.tag_t()
        tag.key = pmt.string_to_symbol(tag_key)
        tag.offset = len(data)
        tag.srcid = pmt.string_to_symbol('qa')
        tag.value = pmt.from_long(block_len)
        tags.append(tag)
        data = np.concatenate((data, frame))
    src = blocks.vector_source_c(data, False, 1, tags)
    cp_rm = gfdm.remove_prefix_cc(frame_len, block_len, offset, tag_key)
    snk = blocks.vector_sink_c()
    self.tb.connect(src, cp_rm, snk)
    self.tb.run()
    # check data
    res = np.array(snk.data())
    tags = snk.tags()
    self.assertTrue(len(tags) == 0)  # propagation policy is TPP_DONT
    self.assertComplexTuplesAlmostEqual(res, ref, 5)
def test_1(self):
    """Mux two tagged streams; check data order and tag placement."""
    datas = (
        0, 1, 2, 5, 6, 10, 14, 15, 16, 3, 4, 7, 8, 9, 11, 12, 13, 17
    )
    expected = tuple(range(18))
    tagname = "packet_length"
    len_tags_0 = (
        make_len_tag(0, tagname, 3),
        make_len_tag(3, tagname, 2),
        make_len_tag(5, tagname, 1),
        make_len_tag(6, tagname, 3)
    )
    len_tags_1 = (
        make_len_tag(0, tagname, 2),
        make_len_tag(2, tagname, 3),
        make_len_tag(5, tagname, 3),
        make_len_tag(8, tagname, 1)
    )

    def make_test_tag(key, offset, val):
        # Build an unrelated payload tag to check tag propagation.
        t = gr.tag_t()
        t.key = pmt.string_to_symbol(key)
        t.offset = offset
        t.value = pmt.to_pmt(val)
        return t

    test_tag_0 = make_test_tag('spam', 4, 42)  # On the second '1'
    test_tag_1 = make_test_tag('eggs', 3, 23)  # First '3' of 2nd stream
    src0 = blocks.vector_source_b(datas[0:9], False, 1,
                                  len_tags_0 + (test_tag_0,))
    src1 = blocks.vector_source_b(datas[9:], False, 1,
                                  len_tags_1 + (test_tag_1,))
    mux = blocks.tagged_stream_mux(gr.sizeof_char, tagname)
    snk = blocks.vector_sink_b()
    self.tb.connect(src0, (mux, 0))
    self.tb.connect(src1, (mux, 1))
    self.tb.connect(mux, snk)
    self.tb.run()
    self.assertEqual(expected, snk.data())
    tags = sorted((t.offset, t.key, t.value)
                  for t in (gr.tag_to_python(x) for x in snk.tags()))
    self.assertEqual(tags, [
        (0, 'packet_length', 5),
        (5, 'packet_length', 5),
        (6, 'spam', 42),
        (8, 'eggs', 23),
        (10, 'packet_length', 4),
        (14, 'packet_length', 4)
    ])
def test_001_t(self):
    """msg_manipulator: add/multiply constants on the named vectors."""
    # set up pmt: rx_time header plus three named f32 vectors
    in_2 = (-1, 2, -3, 4, 5, 0)
    in_3 = (1, 2, 3, 4, 5, 0)
    in_4 = (1, -2.1, 3, -4.2, 5.9, 0)
    pmt_1 = pmt.list2(pmt.string_to_symbol("rx_time"), pmt.from_long(0))
    pmt_2 = pmt.list2(pmt.string_to_symbol("test"),
                      pmt.init_f32vector(6, in_2))
    pmt_3 = pmt.list2(pmt.string_to_symbol("velocity"),
                      pmt.init_f32vector(6, in_3))
    pmt_4 = pmt.list2(pmt.string_to_symbol("test2"),
                      pmt.init_f32vector(6, in_4))
    pmt_in = pmt.list4(pmt_1, pmt_2, pmt_3, pmt_4)
    # set up fg: strobe -> manipulator -> debug sink
    symbols = ("test", "test2")
    const_add = (1, -2)
    const_mult = (-5, 1)
    strobe = blocks.message_strobe(pmt_in, 400)
    test = radar.msg_manipulator(symbols, const_add, const_mult)
    debug = blocks.message_debug()
    self.tb.msg_connect(strobe, "strobe", test, "Msg in")
    self.tb.msg_connect(test, "Msg out", debug, "store")
    self.tb.msg_connect(test, "Msg out", debug, "print")
    # run fg briefly
    self.tb.start()
    sleep(0.5)
    self.tb.stop()
    self.tb.wait()
    # check data: "test"/"test2" transformed, "velocity" untouched
    msg = debug.get_message(0)
    out_2 = pmt.f32vector_elements(pmt.nth(1, pmt.nth(1, msg)))
    out_3 = pmt.f32vector_elements(pmt.nth(1, pmt.nth(2, msg)))
    out_4 = pmt.f32vector_elements(pmt.nth(1, pmt.nth(3, msg)))
    for k in range(6):
        self.assertAlmostEqual((in_2[k] + const_add[0]) * const_mult[0],
                               out_2[k], 3)
        self.assertAlmostEqual(in_3[k], out_3[k], 3)
        self.assertAlmostEqual((in_4[k] + const_add[1]) * const_mult[1],
                               out_4[k], 3)
def test_002_simpledfe (self):
    """ Use the simple DFE equalizer.

    Sends four OFDM symbols through a randomly-rotated channel and
    checks that the decision-feedback equalizer recovers the original
    constellation points and the channel-taps tag.
    """
    fft_len = 8
    #  -1 marks unoccupied carriers (set to 0 in the signal).
    tx_data = [-1, -1, 1, 2, -1, 3, 0, -1,  # 0
               -1, -1, 0, 2, -1, 2, 0, -1,  # 8
               -1, -1, 3, 0, -1, 1, 0, -1,  # 16 (Pilot symbols)
               -1, -1, 1, 1, -1, 0, 2, -1]  # 24
    cnst = digital.constellation_qpsk()
    tx_signal = [cnst.map_to_points_v(x)[0] if x != -1 else 0
                 for x in tx_data]
    occupied_carriers = ((1, 2, 6, 7),)
    pilot_carriers = ((), (), (1, 2, 6, 7), ())
    pilot_symbols = (
        [], [], [cnst.map_to_points_v(x)[0] for x in (1, 0, 3, 0)], []
    )
    equalizer = digital.ofdm_equalizer_simpledfe(
        fft_len, cnst.base(), occupied_carriers, pilot_carriers,
        pilot_symbols, 0, 0.01
    )
    channel = [
        0, 0, 1, 1, 0, 1, 1, 0,
        0, 0, 1, 1, 0, 1, 1, 0,  # These coefficients will be rotated slightly...
        0, 0, 1j, 1j, 0, 1j, 1j, 0,  # Go crazy here!
        0, 0, 1j, 1j, 0, 1j, 1j, 0  # ...and again here.
    ]
    # Apply a small random rotation to symbol 2's taps; symbol 4's
    # rotation has zero magnitude (factor is exp(0) == 1).
    for idx in range(fft_len, 2 * fft_len):
        channel[idx] = channel[idx - fft_len] * numpy.exp(
            1j * .1 * numpy.pi * (numpy.random.rand() - .5))
        idx2 = idx + 2 * fft_len
        channel[idx2] = channel[idx2] * numpy.exp(
            1j * 0 * numpy.pi * (numpy.random.rand() - .5))
    len_tag_key = "frame_len"
    len_tag = gr.tag_t()
    len_tag.offset = 0
    len_tag.key = pmt.string_to_symbol(len_tag_key)
    len_tag.value = pmt.from_long(4)
    # Seed the equalizer with the first symbol's channel taps.
    chan_tag = gr.tag_t()
    chan_tag.offset = 0
    chan_tag.key = pmt.string_to_symbol("ofdm_sync_chan_taps")
    chan_tag.value = pmt.init_c32vector(fft_len, channel[:fft_len])
    src = blocks.vector_source_c(
        numpy.multiply(tx_signal, channel), False, fft_len,
        (len_tag, chan_tag))
    eq = digital.ofdm_frame_equalizer_vcvc(
        equalizer.base(), 0, len_tag_key, True)
    sink = blocks.vector_sink_c(fft_len)
    self.tb.connect(src, eq, sink)
    self.tb.run ()
    rx_data = [cnst.decision_maker_v((x,)) if x != 0 else -1
               for x in sink.data()]
    self.assertEqual(tx_data, rx_data)
    for tag in sink.tags():
        if pmt.symbol_to_string(tag.key) == len_tag_key:
            self.assertEqual(pmt.to_long(tag.value), 4)
        if pmt.symbol_to_string(tag.key) == "ofdm_sync_chan_taps":
            # Equalizer must report the *final* channel estimate.
            self.assertComplexTuplesAlmostEqual(
                list(pmt.c32vector_elements(tag.value)),
                channel[-fft_len:], places=1)
def test_001_t(self):
    """
    First header: Packet length 4, packet num 0
    Second header: Packet 2, packet num 1
    Third header: Invalid (parity bit does not check) (would be len 4, num 2)
    """
    encoded_headers = (
        # | Number of bytes | Packet number | Parity
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0,
        0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0
    )
    packet_len_tagname = "packet_len"
    # An unrelated tag inside the first header; the parser should fold
    # it into that header's message dict.
    random_tag = gr.tag_t()
    random_tag.offset = 5
    random_tag.key = pmt.string_to_symbol("foo")
    random_tag.value = pmt.from_long(42)
    src = blocks.vector_source_b(encoded_headers, tags=(random_tag,))
    parser = digital.packet_headerparser_b(32, packet_len_tagname)
    sink = blocks.message_debug()
    self.tb.connect(src, parser)
    self.tb.msg_connect(parser, "header_data", sink, "store")
    self.tb.start()
    time.sleep(1)
    self.tb.stop()
    self.tb.wait()
    self.assertEqual(sink.num_messages(), 3)
    expected_msgs = [
        {'packet_len': 4, 'packet_num': 0, 'foo': 42},  # header 1 + tag
        {'packet_len': 2, 'packet_num': 1},             # header 2
        False,                                          # header 3: bad parity
    ]
    for i, want in enumerate(expected_msgs):
        self.assertEqual(pmt.to_python(sink.get_message(i)), want)
def test_001_t(self):
    """Run strobe messages through the varicode encoder and dump output.

    Print statements converted to Python 3 call syntax (were Python 2
    print statements).
    """
    count = 2
    interval = 1000
    msg_list = [pmt.string_to_symbol('hello')] * count
    src_data = [1.0] * (count * interval)
    src = blocks.vector_source_f(src_data, False)
    msg_gen = message_generator(msg_list, interval)
    msg_cons = varicode_enc_b()
    dest = blocks.vector_sink_b()
    self.tb.connect(src, msg_gen)
    self.tb.connect(msg_cons, dest)
    self.tb.msg_connect(msg_gen, 'out_port', msg_cons, 'in_port')
    self.tb.run()
    print("Msg Ctr:", msg_gen.msg_ctr)
    # Poll until the generator has emitted all messages.
    while msg_gen.msg_ctr < count:
        print("Msg Ctr:", msg_gen.msg_ctr)
        time.sleep(0.5)
    print("Msg Ctr:", msg_gen.msg_ctr)
    self.tb.stop()
    self.tb.wait()
    print('Output Data:', dest.data())
def test_0010_tag_propagation(self):
    """ Make sure tags on the CRC aren't lost. """
    # Data with precalculated CRC (last 4 bytes: 2, 67, 225, 188).
    data = (
        0, 1, 2, 3, 4, 5, 6, 7, 8,
        0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1,
        0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1
    )
    # Put the test tag on the very last CRC item.
    testtag = gr.tag_t()
    testtag.offset = len(data) - 1
    testtag.key = pmt.string_to_symbol('tag1')
    testtag.value = pmt.from_long(0)
    src = blocks.vector_source_b(data, False, 1, (testtag,))
    crc_check = digital.crc32_bb(True, self.tsb_key, False)
    sink = blocks.tsb_vector_sink_b(tsb_key=self.tsb_key)
    tagger = blocks.stream_to_tagged_stream(
        gr.sizeof_char, 1, len(data), self.tsb_key)
    self.tb.connect(src, tagger, crc_check, sink)
    self.tb.run()
    # After the 32 CRC bits are stripped, the tag must land on the last
    # remaining item.
    tag1_offsets = [tag.offset for tag in sink.tags()
                    if pmt.symbol_to_string(tag.key) == 'tag1']
    self.assertEqual([len(data) - 33, ], tag1_offsets)