def timer_tick(self):
    """Poll the USRP GPS/time sensors once, publish a 'gps_data' PDU, and
    re-arm the periodic timer."""
    if (self.has_gps_sensor):
        # NMEA sentences plus timing/lock status from the UHD motherboard sensors
        gps_gpgga = self.uhd_dev.get_mboard_sensor('gps_gpgga').value
        gps_gprmc = self.uhd_dev.get_mboard_sensor('gps_gprmc').value
        gps_time = self.uhd_dev.get_mboard_sensor('gps_time').to_int()
        gps_locked = self.uhd_dev.get_mboard_sensor('gps_locked').to_bool()
        pps_seconds = self.uhd_dev.get_time_last_pps().to_ticks(1.0)
        # USRP time counts as absolute when it equals GPS time at the last PPS edge
        uhd_time_set = (pps_seconds == gps_time)
        gps_data = pmt.make_dict()
        gps_data = pmt.dict_add(gps_data, pmt.intern('gps_time'), pmt.to_pmt(gps_time))
        gps_data = pmt.dict_add(gps_data, pmt.intern('gps_locked'), pmt.to_pmt(gps_locked))
        gps_data = pmt.dict_add(gps_data, pmt.intern('usrp_time_is_absolute'), pmt.to_pmt(uhd_time_set))
        gps_data = pmt.dict_add(gps_data, pmt.intern('usrp_time'), pmt.to_pmt(pps_seconds))
        self.message_port_pub(pmt.intern('gps_data'), gps_data)
        if self.udp_socket:
            # send UDP
            # NOTE(review): sendto() with a str only works on Python 2;
            # Python 3 sockets require bytes — confirm target interpreter.
            self.udp_socket.sendto("\r\n" + gps_gprmc + "\r\n" + gps_gpgga, ("127.0.0.1", self.udp_port))
    # re-arm regardless of GPS availability so the poll loop keeps running
    self.timer = Timer(self.update_rate, self.timer_tick)
    self.timer.start()
def test_002_callbacks (self):
    """Exercise the set_key/set_val/set_kv callbacks: each re-emission of the
    previous output PDU should pick up the newly configured key/value pair."""
    in_data = [0, 0, 0, 0]
    in_meta = pmt.make_dict()
    # pass 1: block adds 'num' -> 4
    expected_meta1 = pmt.dict_add(in_meta, pmt.intern('num'), pmt.from_long(4))
    # pass 2 (after set_key/set_val): adds 'name' -> 'param1'
    expected_meta2 = pmt.dict_add(pmt.dict_add(in_meta, pmt.intern('num'), pmt.from_long(4)), pmt.intern('name'), pmt.intern('param1'))
    # pass 3 (after set_kv): 'num' replaced with 1
    expected_meta3 = pmt.dict_add(pmt.dict_add(in_meta, pmt.intern('name'), pmt.intern('param1')), pmt.intern('num'), pmt.from_long(1))
    in_pdu = pmt.cons(in_meta, pmt.init_u8vector(len(in_data), in_data))
    expected_pdu1 = pmt.cons(expected_meta1, pmt.init_u8vector(len(in_data), in_data))
    expected_pdu2 = pmt.cons(expected_meta2, pmt.init_u8vector(len(in_data), in_data))
    expected_pdu3 = pmt.cons(expected_meta3, pmt.init_u8vector(len(in_data), in_data))
    self.tb.start()
    time.sleep(.001)
    # a malformed (non-PDU) message must be ignored without crashing
    self.emitter.emit(pmt.intern("MALFORMED PDU"))
    time.sleep(.001)
    self.emitter.emit(in_pdu)
    time.sleep(.001)
    self.set.set_key(pmt.intern('name'))
    self.set.set_val(pmt.intern('param1'))
    time.sleep(.001)
    # feed the previous output back through so the new key/value is applied
    self.emitter.emit(self.debug.get_message(0))
    time.sleep(.001)
    self.set.set_kv(pmt.intern('num'), pmt.from_long(1))
    time.sleep(.001)
    self.emitter.emit(self.debug.get_message(1))
    time.sleep(.01)
    self.tb.stop()
    self.tb.wait()
    self.assertTrue(pmt.equal(self.debug.get_message(0), expected_pdu1))
    self.assertTrue(pmt.equal(self.debug.get_message(1), expected_pdu2))
    self.assertTrue(pmt.equal(self.debug.get_message(2), expected_pdu3))
def work(self, input_items, output_items):
    """Accumulate bit errors against the local reference generator and, on
    each 'rx_time' tag, publish a BER report PDU and reset the counters."""
    inb = input_items[0]
    linb = len(inb)
    # reference bit sequence of matching length from the shared generator
    gen = self.base.gen_n(linb)
    tags = self.get_tags_in_window(0, 0, linb, pmt.intern("rx_time"))
    if tags:
        tag = tags[-1]
        rx_time = tag.value
        # rx_time tags are (uint64 whole seconds, double fractional seconds)
        seconds = pmt.to_uint64(pmt.tuple_ref(rx_time, 0))
        fractional_seconds = pmt.to_double(pmt.tuple_ref(rx_time, 1))
        timestamp = seconds + fractional_seconds
        # publish only when bits were accumulated since the last report
        if self.nbits > 0:
            ber = self.nerrs / float(self.nbits)
            #print "NBits: %d \tNErrs: %d \tBER: %.4E, \ttimestamp %f"%(int(self.nbits), int(self.nerrs), ber, timestamp)
            d = pmt.make_dict()
            d = pmt.dict_add(d, pmt.intern('timestamp'), pmt.from_double(timestamp))
            d = pmt.dict_add(d, pmt.intern('ber'), pmt.from_double(ber))
            self.message_port_pub(self.ber_port_id, d)
            self.nerrs = 0
            self.nbits = 0
    # XOR against the reference: each set bit is one bit error
    self.nerrs += numpy.sum(numpy.bitwise_xor(inb, gen))
    self.nbits += len(inb)
    # if self.nbits > 0:
    #     print "NBits: %d \tNErrs: %d \tBER: %.4E"%(int(self.nbits), int(self.nerrs), self.nerrs/self.nbits)
    return len(inb)
def __init__(self, cfreq, ant_list, coord_type):
    """Message-driven block that emits a fixed 'track' observation command.

    Args:
        cfreq: center frequency in Hz.
        ant_list: antenna list string.
        coord_type: coordinate-system identifier string.
    """
    gr.sync_block.__init__(self, name="trackscan", in_sig=None, out_sig=None)
    self.message_port_register_out(pmt.intern("command"))
    self.message_port_register_in(pmt.intern("msg_in"))
    self.set_msg_handler(pmt.intern('msg_in'), self.handle_msg)
    # Assemble the observation-description dictionary published on "command".
    command = pmt.make_dict()
    for key_name, pmt_value in (
            ("antennas_list", pmt.intern(ant_list)),
            ("freq", pmt.from_double(cfreq)),
            ("obs_type", pmt.intern("track")),
            ("coord_type", pmt.intern(coord_type))):
        command = pmt.dict_add(command, pmt.intern(key_name), pmt_value)
    self.command = command
def test_003_normal_passa(self):
    """A PDU whose start_time (1.0) lies inside the [1, 5] window passes
    through the range filter unchanged."""
    self.dut = pdu_utils.pdu_range_filter(pmt.intern("start_time"), 1, 5, False)
    self.connectUp()
    payload = [0] * 6
    meta_in = pmt.dict_add(pmt.make_dict(), pmt.intern("start_time"),
                           pmt.from_double(1.0))
    pdu_in = pmt.cons(meta_in, pmt.init_u8vector(len(payload), payload))
    meta_out = pmt.dict_add(pmt.make_dict(), pmt.intern("start_time"),
                            pmt.from_double(1.0))
    pdu_expected = pmt.cons(meta_out,
                            pmt.init_u8vector(len(payload), payload))
    self.tb.start()
    time.sleep(.001)
    self.emitter.emit(pdu_in)
    time.sleep(.01)
    self.tb.stop()
    self.tb.wait()
    self.assertEqual(1, self.debug.num_messages())
    self.assertTrue(pmt.equal(self.debug.get_message(0), pdu_expected))
def test1_cleanup(self): ''' All files should be deleted by the monitor when complete ''' # open a dummy file fname = '/tmp/foo.txt' if os.path.exists(fname): os.remove(fname) Path(fname).touch() # PMT p = pmt.dict_add(pmt.make_dict(),pmt.intern('rx_freq'), pmt.from_double(915e6)) p = pmt.dict_add(p,pmt.intern('rx_rate'),pmt.from_double(30.72e6)) p = pmt.dict_add(p,pmt.intern('rx_time'),pmt.make_tuple( pmt.from_uint64(0),pmt.from_double(0))) p = pmt.dict_add(p,pmt.intern('fname'),pmt.intern(fname)) # blocks emitter = pdu_utils.message_emitter(pmt.PMT_NIL) debug = blocks.message_debug() monitor = file_monitor(10,'/tmp') # connect self.tb.msg_connect((emitter,'msg'),(monitor,'pdu')) # set up fg - terrible hacky way of doing this until we get # pdu utility message emitter working self.tb.start() emitter.emit(p) time.sleep(.05) self.tb.stop() self.tb.wait() # check data self.assertTrue(not os.path.exists(fname))
def parser_output(self, mat_frame, parity_ok=True, crc_ok=True):
    """Flatten mat_frame to a byte string and publish it, together with the
    CRC and parity status flags, as a PDU on the 'parser_output' port.

    Args:
        mat_frame: 2-D numpy array holding the decoded frame.
        parity_ok: True when the parity check passed.
        crc_ok: True when the CRC check passed.
    """
    # Reshape the matrix into a column vector and serialize it to bytes.
    size_mat_frame = mat_frame.shape
    mat_vect = numpy.reshape(mat_frame, (size_mat_frame[0] * size_mat_frame[1], 1))
    mat_vect = numpy.array(mat_vect)
    # fix: tostring() was deprecated (and removed in NumPy 2.0); tobytes()
    # returns the identical byte string.
    mat_vect_str = mat_vect.tobytes()
    # Build the dictionary with the matrix bytes and the crc_ok/parity_ok flags.
    key_crc = pmt.intern("crc_ok")
    val_crc = pmt.to_pmt(crc_ok)
    key_parity = pmt.intern("parity_ok")
    val_parity = pmt.to_pmt(parity_ok)
    key_mat = pmt.intern("mat_vect_str")
    val_mat = pmt.to_pmt(mat_vect_str)
    dic = pmt.make_dict()
    dic = pmt.dict_add(dic, key_crc, val_crc)
    dic = pmt.dict_add(dic, key_parity, val_parity)
    dic = pmt.dict_add(dic, key_mat, val_mat)
    # Send the dictionary on to the acarsparser block.
    self.message_port_pub(pmt.intern("parser_output"),
                          pmt.cons(pmt.PMT_NIL, dic))
def test_pdu(self):
    """A protobuf BlockMessage pair (dict metadata + f64 vector) should be
    converted by command_source into the equivalent PMT PDU."""
    cs = starcoder.command_source()
    snk = blocks.message_debug()
    self.tb.msg_connect((cs, 'out'), (snk, 'store'))
    # build the protobuf message: two metadata entries plus an f64 vector
    msg = starcoder_pb2.BlockMessage()
    md = msg.pair_value.car.dict_value.entry.add()
    md.key.symbol_value = 'metadata_1'
    md.value.integer_value = -2
    md = msg.pair_value.car.dict_value.entry.add()
    md.key.symbol_value = 'metadata_2'
    md.value.symbol_value = 'val'
    msg.pair_value.cdr.uniform_vector_value.f64_value.value.extend(
        [2.4, -12.3, 21.2])
    # equivalent PMT PDU built by hand
    pmt_dict = pmt.make_dict()
    pmt_dict = pmt.dict_add(pmt_dict, pmt.intern('metadata_1'), pmt.from_long(-2))
    pmt_dict = pmt.dict_add(pmt_dict, pmt.intern('metadata_2'), pmt.intern('val'))
    expected = pmt.cons(pmt_dict, pmt.init_f64vector(3, [2.4, -12.3, 21.2]))
    self.tb.start()
    cs.push(msg.SerializeToString())
    time.sleep(0.1)
    self.tb.stop()
    self.tb.wait()
    self.assertEqual(snk.num_messages(), 1)
    # NOTE(review): pmt.is_dict() is true for pairs, so this passes for a PDU;
    # the equality check below is the substantive assertion.
    self.assertTrue(pmt.is_dict(snk.get_message(0)))
    self.assertTrue(pmt.equal(snk.get_message(0), expected))
def get_pdu_header(dest_id, src_id, frame_num, checksum=None):
    """Build a PDU metadata dict with checksum, frame_num, src_id and dest_id.

    Args:
        dest_id: destination identifier (stored as a long).
        src_id: source identifier (stored as a long).
        frame_num: frame sequence number (stored as a long).
        checksum: optional checksum passed to set_pdu_header_checksum.

    Returns:
        A PMT dict carrying the header fields.
    """
    header = set_pdu_header_checksum(pmt.make_dict(), checksum)
    for field, value in (("frame_num", frame_num),
                         ("src_id", src_id),
                         ("dest_id", dest_id)):
        header = pmt.dict_add(header, pmt.intern(field), pmt.from_long(value))
    return header
def set_pdu_header_checksum(meta, checksum=None):
    """Attach a 'checksum' entry to meta.

    Stores PMT_NIL when checksum is None, otherwise the checksum ndarray
    converted to a PMT u8 vector.
    """
    if checksum is None:
        value = pmt.PMT_NIL
    else:
        value = ndarray_to_pmt_u8vector(checksum)
    return pmt.dict_add(meta, pmt.intern("checksum"), value)
def test_002_normal(self):
    """A PDU stamped 10 s in the past should get its system_time refreshed
    and a 'sys time delta (ms)' entry of roughly 10000 added."""
    tnow = time.time()
    in_data = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1]
    # input claims it was captured 10 seconds ago
    meta = pmt.dict_add(pmt.make_dict(), pmt.intern(
        'system_time'), pmt.from_double(tnow - 10.0))
    in_pdu = pmt.cons(meta, pmt.init_c32vector(len(in_data), in_data))
    e_data = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1]
    e_meta = pmt.dict_add(pmt.make_dict(), pmt.intern(
        'system_time'), pmt.from_double(tnow))
    e_meta = pmt.dict_add(e_meta, pmt.intern(
        'sys time delta (ms)'), pmt.from_double(10000.0))
    e_pdu = pmt.cons(e_meta, pmt.init_c32vector(len(e_data), e_data))
    # set up fg
    self.tb.start()
    self.time_delta.to_basic_block()._post(pmt.intern("pdu"), in_pdu)
    self.waitFor(lambda: self.debug.num_messages() == 1, timeout=1.0, poll_interval=0.01)
    self.tb.stop()
    self.tb.wait()
    # check data: wide deltas because wall-clock time elapses during the test
    self.assertEqual(1, self.debug.num_messages())
    a_meta = pmt.car(self.debug.get_message(0))
    time_tag = pmt.dict_ref(a_meta, pmt.intern("system_time"), pmt.PMT_NIL)
    delta_tag = pmt.dict_ref(a_meta, pmt.intern(
        "sys time delta (ms)"), pmt.PMT_NIL)
    self.assertAlmostEqual(tnow, pmt.to_double(time_tag), delta=60)
    self.assertAlmostEqual(10000, pmt.to_double(delta_tag), delta=10)
def button_clicked(self):
    """Parse the metadata and payload text inputs and publish them as a PDU
    on the 'pdu_out' port."""
    # Add metadata elements to PDU
    meta = pmt.make_dict()
    try:
        # second text box holds a Python-literal dict of metadata
        # NOTE(review): .toUtf8() is Qt4/PyQt4-only — confirm the Qt binding.
        v_txt = str(self.textinputs[1].text().toUtf8())
        if len(v_txt) > 0:
            meta_dict = ast.literal_eval(v_txt)
            for key, val in meta_dict.items():
                if type(val) == list or type(val) == tuple:
                    # NOTE(review): this stores a tuple of zeros of matching
                    # length — the actual list/tuple contents are discarded.
                    # Confirm whether that is intended.
                    t = pmt.make_vector(len(val), pmt.from_uint64(0))
                    meta = pmt.dict_add(meta, pmt.intern(str(key)), pmt.to_tuple(t))
                else:
                    # Store as uint64
                    if isinstance(val, float):
                        if val.is_integer():
                            meta = pmt.dict_add(meta, pmt.intern(str(key)), pmt.from_uint64(int(val)))
                        else:
                            meta = pmt.dict_add(meta, pmt.intern(str(key)), pmt.from_double(val))
                    else:
                        meta = pmt.dict_add(meta, pmt.intern(str(key)), pmt.from_uint64(int(val)))
    except ValueError as err:
        # malformed metadata text: report and publish with whatever was parsed
        print('PDU Parser Error: ', err)
        pass
    # Generate payload data: first text box converted by the selected type parser
    v_txt = str(self.textinputs[0].text())
    vec = self.data_types[str(self.input_types.currentText())](v_txt)
    # Publish message
    self.message_port_pub(
        pmt.intern("pdu_out"),
        pmt.cons(meta, pmt.to_pmt(numpy.array(vec, dtype=numpy.uint8))))
def test_001_three (self):
    """Three burst fragments tagged 1/3, 2/3, 3/3 should be reassembled in
    index order even when they arrive out of order; malformed input and a
    duplicate fragment are tolerated."""
    in_data1 = [1+1j, 0-1j]
    in_data2 = [1, 0]
    in_data3 = [1j, -1]
    expected_data = in_data1 + in_data2 + in_data3
    # burst_index metadata: (fragment number, total fragments)
    dict1 = pmt.dict_add(pmt.make_dict(), pmt.intern("burst_index"), pmt.cons(pmt.from_uint64(1), pmt.from_uint64(3)))
    dict2 = pmt.dict_add(pmt.make_dict(), pmt.intern("burst_index"), pmt.cons(pmt.from_uint64(2), pmt.from_uint64(3)))
    dict3 = pmt.dict_add(pmt.make_dict(), pmt.intern("burst_index"), pmt.cons(pmt.from_uint64(3), pmt.from_uint64(3)))
    in_pdu1 = pmt.cons(dict1, pmt.init_c32vector(len(in_data1), in_data1))
    in_pdu2 = pmt.cons(dict2, pmt.init_c32vector(len(in_data2), in_data2))
    in_pdu3 = pmt.cons(dict3, pmt.init_c32vector(len(in_data3), in_data3))
    expected_pdu = pmt.cons(pmt.make_dict(), pmt.init_c32vector(len(expected_data), expected_data))
    self.tb.start()
    time.sleep(.001)
    # handle non-pair input
    self.emitter.emit(pmt.intern("MALFORMED PDU"))
    time.sleep(.001)
    # handle malformed pair
    self.emitter.emit(pmt.cons(pmt.intern("NON-PDU"), pmt.intern("PAIR")))
    time.sleep(.001)
    # handle out of order PDU
    self.emitter.emit(in_pdu3)
    time.sleep(.001)
    self.emitter.emit(in_pdu1)
    time.sleep(.001)
    self.emitter.emit(in_pdu2)
    time.sleep(.001)
    self.emitter.emit(in_pdu3)
    time.sleep(.01)
    self.tb.stop()
    self.tb.wait()
    out_data = pmt.c32vector_elements(pmt.cdr(self.debug.get_message(0)))
    self.assertComplexTuplesAlmostEqual(out_data, expected_data)
def test_001_c32(self):
    '''
    complex (c32) input data, no decimation, unity single-tap filter:
    output PDU must equal the input PDU.
    '''
    self.dut = pdu_utils.pdu_fir_filter(1, [1.0])
    self.connectUp()
    i_data = [1, 0, 0, 0] * 5
    i_meta = pmt.dict_add(pmt.make_dict(), pmt.intern("sample_rate"), pmt.from_float(1000.0))
    in_pdu = pmt.cons(i_meta, pmt.init_c32vector(len(i_data), i_data))
    # expected output is identical to the input
    e_data = [1, 0, 0, 0] * 5
    e_meta = pmt.dict_add(pmt.make_dict(), pmt.intern("sample_rate"), pmt.from_float(1000.0))
    e_pdu = pmt.cons(e_meta, pmt.init_c32vector(len(e_data), e_data))
    self.tb.start()
    time.sleep(.01)
    self.emitter.emit(in_pdu)
    time.sleep(.1)
    self.tb.stop()
    self.tb.wait()
    #print("test_001:")
    #print("pdu expected: " + repr(pmt.car(e_pdu)))
    #print("pdu got: " + repr(pmt.car(self.debug.get_message(0))))
    #print("data expected: " + repr(pmt.to_python(pmt.cdr(e_pdu))))
    #print("data got: " + repr(pmt.to_python(pmt.cdr(self.debug.get_message(0)))))
    self.assertTrue(pmt.equal(self.debug.get_message(0), e_pdu))
def test_pdu_add_meta(self):
    """pdu_add_meta must merge its configured dict entries into the PDU
    metadata while leaving the payload untouched."""
    tb = gr.top_block()
    dbg = blocks.message_debug()
    meta = pmt.make_dict()
    meta = pmt.dict_add(meta, pmt.intern('k1'), pmt.intern('v1'))
    meta = pmt.dict_add(meta, pmt.intern('k2'), pmt.intern('v2'))
    add_meta = pdu_add_meta(meta)
    pdu = pmt.cons(pmt.PMT_NIL, pmt.make_u8vector(10, 0))
    tb.msg_connect((add_meta, 'out'), (dbg, 'store'))
    add_meta.to_basic_block()._post(pmt.intern('in'), pdu)
    # 'done' system message lets the flowgraph terminate after processing
    add_meta.to_basic_block()._post(
        pmt.intern('system'),
        pmt.cons(pmt.intern('done'), pmt.from_long(1)))
    tb.start()
    tb.wait()
    pdu_out = dbg.get_message(0)
    meta_out = pmt.car(pdu_out)
    self.assertTrue(pmt.dict_has_key(meta_out, pmt.intern('k1')),
                    'Test key k1 not in output PDU metadata')
    # fix: failure message previously said "k1" for the k2 assertion
    self.assertTrue(pmt.dict_has_key(meta_out, pmt.intern('k2')),
                    'Test key k2 not in output PDU metadata')
    self.assertEqual(pmt.u8vector_elements(pmt.cdr(pdu_out)),
                     pmt.u8vector_elements(pmt.cdr(pdu)),
                     'Output PDU data does not match input PDU data')
def create_frame(config, tag_key):
    """Build one GFDM frame (preamble + cyclic-prefixed modulated block) and
    a stream tag describing it.

    Returns:
        (frame, symbols, tag): the composite sample frame, the QPSK symbols
        it carries, and a gr.tag_t whose value dict holds fixed test
        correlation metadata.
    """
    symbols = get_random_qpsk(config.timeslots * config.active_subcarriers)
    d_block = modulate_mapped_gfdm_block(
        symbols,
        config.timeslots,
        config.subcarriers,
        config.active_subcarriers,
        2,
        0.2,
        dc_free=True,
    )
    preamble = config.full_preambles[0]
    frame = add_cyclic_starfix(d_block, config.cp_len, config.cs_len)
    frame = np.concatenate((preamble, frame))
    tag = gr.tag_t()
    tag.key = pmt.string_to_symbol(tag_key)
    # fixed correlation metadata used by the QA consumer of this frame
    d = pmt.make_dict()
    d = pmt.dict_add(d, pmt.mp("xcorr_idx"), pmt.from_uint64(42))
    d = pmt.dict_add(d, pmt.mp("xcorr_offset"), pmt.from_uint64(4711))
    d = pmt.dict_add(d, pmt.mp("sc_rot"), pmt.from_complex(1.0 + 0.0j))
    # tag.offset = data.size + cp_len
    tag.srcid = pmt.string_to_symbol("qa")
    tag.value = d
    return frame, symbols, tag
def test_001_split(self):
    """pdu_split should route metadata to 'dict' and payload to 'data',
    skipping malformed input, empty metadata, and empty payloads as
    appropriate."""
    emitter = pdu_utils.message_emitter()
    split = pdu_utils.pdu_split()
    d1 = blocks.message_debug()
    d2 = blocks.message_debug()
    self.tb.msg_connect((emitter, 'msg'), (split, 'pdu_in'))
    self.tb.msg_connect((split, 'dict'), (d1, 'store'))
    self.tb.msg_connect((split, 'data'), (d2, 'store'))
    in_meta1 = pmt.dict_add(pmt.make_dict(), pmt.intern('num'), pmt.from_long(4))
    in_meta2 = pmt.dict_add(pmt.make_dict(), pmt.intern('n'), pmt.from_long(99))
    in_pdu = pmt.cons(in_meta1, pmt.init_u8vector(6, range(6)))
    self.tb.start()
    time.sleep(.001)
    # non-pair input: dropped entirely
    emitter.emit(pmt.intern("MALFORMED PDU"))
    time.sleep(.001)
    # NIL metadata: only the data side is emitted
    emitter.emit(pmt.cons(pmt.PMT_NIL, pmt.init_u8vector(2, range(2))))
    time.sleep(.001)
    # empty payload: only the metadata side is emitted
    emitter.emit(pmt.cons(in_meta2, pmt.init_u8vector(0, [])))
    time.sleep(.001)
    emitter.emit(in_pdu)
    time.sleep(.01)
    self.tb.stop()
    self.tb.wait()
    self.assertTrue(pmt.equal(d1.get_message(0), in_meta2))
    self.assertTrue(pmt.equal(d1.get_message(1), in_meta1))
    self.assertTrue(
        pmt.equal(d2.get_message(0), pmt.init_u8vector(2, range(2))))
    self.assertTrue(
        pmt.equal(d2.get_message(1), pmt.init_u8vector(6, range(6))))
def send_command(self, freq, bw):
    """Build a tuner command dict with 'freq' and 'bandwidth' entries.

    Args:
        freq: center frequency in Hz.
        bw: bandwidth in Hz.

    Returns:
        The PMT dict; the caller is responsible for publishing it.
    """
    command = pmt.make_dict()
    command = pmt.dict_add(command, pmt.intern("freq"),
                           pmt.from_double(freq))
    # fix: the bandwidth entry previously duplicated `freq`; use `bw`.
    command = pmt.dict_add(command, pmt.intern("bandwidth"),
                           pmt.from_double(bw))
    return command
def test_002_uint8_metadata_header(self):
    """CSV writer with a metadata header: a PDU round-trips through the file
    with all metadata values represented as strings."""
    emitter = pdu_utils.message_emitter()
    writer = csv_writer('/tmp/file.csv', True, '', 'uint8')
    # generate pdu
    metadata = pmt.dict_add(pmt.make_dict(), pmt.intern('a'), pmt.intern('a'))
    metadata = pmt.dict_add(metadata, pmt.intern('b'), pmt.from_long((0)))
    data = pmt.init_u8vector(5, [11, 12, 13, 14, 15])
    pdu = pmt.cons(metadata, data)
    # expected will only have intern strings
    metadata = pmt.dict_add(pmt.make_dict(), pmt.intern('a'), pmt.intern('a'))
    metadata = pmt.dict_add(metadata, pmt.intern('b'), pmt.intern('0'))
    expected = pmt.cons(metadata, data)
    # run
    # NOTE(review): `pdu` above is built but never emitted — the test emits
    # `expected` instead, so the long->string conversion path is not actually
    # exercised. Confirm the intent.
    tb = gr.top_block()
    tb.msg_connect((emitter, 'msg'), (writer, 'in'))
    tb.start()
    emitter.emit(expected)
    time.sleep(.5)
    tb.stop()
    tb.wait()
    # read in csv
    self.assertTrue(
        self.check_file('/tmp/file.csv', expected, has_header=True))
def test_006_basic_nrz(self):
    """NRZ mapping with no preamble/postamble: bits {0,1} must map to
    floats {-1,+1} while the metadata passes through unchanged."""
    self.dut = pdu_utils.pdu_preamble([], [], 1, 0, True)
    self.connectUp()
    input_data = pmt.init_u8vector(8, [1, 0, 1, 1, 0, 0, 1, 0])
    input_dict = pmt.dict_add(pmt.make_dict(), pmt.intern("KEY"),
                              pmt.intern("VALUE"))
    input_pdu = pmt.cons(input_dict, input_data)
    expected_data = [1, -1, 1, 1, -1, -1, 1, -1]
    expected_dict = pmt.dict_add(pmt.make_dict(), pmt.intern("KEY"),
                                 pmt.intern("VALUE"))
    expected_pdu = pmt.cons(
        expected_dict,
        pmt.init_f32vector(len(expected_data), expected_data))
    self.tb.start()
    time.sleep(.001)
    self.emitter.emit(input_pdu)
    time.sleep(.01)
    self.tb.stop()
    self.tb.wait()
    self.assertEqual(1, self.debug.num_messages())
    # diagnostic dump for failures
    print("test_006_basic_nrz:")
    print("pdu expected: " + repr(pmt.car(expected_pdu)))
    print("pdu got: " + repr(pmt.car(self.debug.get_message(0))))
    print("data expected: " + repr(pmt.to_python(pmt.cdr(expected_pdu))))
    print("data got: " + repr(pmt.to_python(pmt.cdr(self.debug.get_message(0)))))
    # fix: trailing bare `print` was a Python 2 leftover (a no-op expression
    # on Python 3); call the function to emit the intended blank line.
    print()
    self.assertTrue(pmt.equal(self.debug.get_message(0), expected_pdu))
def test_006_f32(self):
    '''
    float input data, complicated input, decimation by 4, and a 28-tap
    low-pass filter; checks metadata (sample_rate scaled, start_time
    shifted by the filter group delay) and data to within 0.01%.
    '''
    taps = [
        -0.1, -0.2, -0.3, 0., 0.8, 1.4, 0.7, -1.9, -4.5, -3.8, 2.9, 14.4,
        25.5, 30.1, 30.1, 25.5, 14.4, 2.9, -3.8, -4.5, -1.9, 0.7, 1.4, 0.8,
        0., -0.3, -0.2, -0.1
    ]
    print(len(taps))
    self.dut = pdu_utils.pdu_fir_filter(4, taps)
    self.connectUp()
    i_data = [
        0., 0.25, 0.48, 0.68, 0.84, 0.95, 1., 0.98, 0.91, 0.78, 0.6, 0.38,
        0.14, -0.11, -0.35, -0.57, -0.76, -0.89, -0.98, -1., -0.96, -0.86,
        -0.71, -0.51, -0.28, -0.03, 0.22, 0.45, 0.66, 0.82, 0.94, 0.99,
        0.99, 0.92, 0.8, 0.62, 0.41, 0.17, -0.08, -0.32, -0.54, -0.73,
        -0.88, -0.97, -1., -0.97, -0.88, -0.73, -0.54, -0.31, -0.07, 0.18,
        0.42, 0.63, 0.8, 0.93, 0.99, 0.99, 0.93, 0.82, 0.65, 0.44, 0.21,
        -0.04, -0.29, -0.52, -0.71, -0.86, -0.96, -1., -0.98, -0.89
    ]
    i_meta = pmt.dict_add(pmt.make_dict(), pmt.intern("sample_rate"),
                          pmt.from_double(40e3))
    i_meta = pmt.dict_add(i_meta, pmt.intern("start_time"),
                          pmt.from_double(65.4321))
    in_pdu = pmt.cons(i_meta, pmt.init_f32vector(len(i_data), i_data))
    e_data = [
        8.344998, 95.737, 121.949005, 33.454994, -85.351006, -126.05898,
        -50.684998, 71.381004, 127.386, 66.26399, -55.451004, -126.658005,
        -81.652, 38.550003, 123.046005, 94.08399, -19.649996, -123.254005
    ]
    # decimation by 4: sample_rate 40k -> 10k; start_time advanced slightly
    e_meta = pmt.dict_add(pmt.make_dict(), pmt.intern("sample_rate"),
                          pmt.from_double(10e3))
    e_meta = pmt.dict_add(e_meta, pmt.intern("start_time"),
                          pmt.from_double(65.432050))
    e_pdu = pmt.cons(e_meta, pmt.init_f32vector(len(e_data), e_data))
    self.tb.start()
    time.sleep(.01)
    self.emitter.emit(in_pdu)
    time.sleep(.1)
    self.tb.stop()
    self.tb.wait()
    #print("test_005:")
    #print("pdu expected: " + repr(pmt.car(e_pdu)))
    #print("pdu got: " + repr(pmt.car(self.debug.get_message(0))))
    #print("data expected: " + repr(pmt.to_python(pmt.cdr(e_pdu))))
    #print("data got: " + repr(pmt.to_python(pmt.cdr(self.debug.get_message(0)))))
    self.assertTrue(pmt.equal(pmt.car(self.debug.get_message(0)), e_meta))
    # relative error of the output samples versus expected
    v_diff = np.abs(
        pmt.f32vector_elements(pmt.cdr(self.debug.get_message(0))) -
        np.array(e_data)) / e_data
    print("Maximum error is", np.max(v_diff))
    self.assertTrue(np.max(v_diff) < 0.0001)
def msg_handler_analyzed_data_in(self, msg):
    """Handle an analyzed gate message: X gates flip the tracked readout
    angle for the addressed qubit; RO gates publish the simulated qubit
    parameters on 'simulated_data'."""
    print("in msg_handler_analyzed_data_in")
    self.lock()
    # NOTE(review): the early `return`s below skip self.unlock() — confirm
    # this cannot leave the flowgraph locked.
    gate_params = pmt.vector_ref(msg, 0)
    gate_type_PMT = pmt.dict_ref(
        gate_params,
        pmt.from_float(
            quantum_gate_param_type.quantum_gate_param_type.GATE_TYPE),
        pmt.PMT_NIL)
    if (pmt.eq(gate_type_PMT, pmt.PMT_NIL)):
        return
    gate_type = pmt.to_float(gate_type_PMT)
    print("gate_params.gate_type=" + str(gate_type))
    qubit_id_PMT = pmt.dict_ref(
        gate_params,
        pmt.from_float(
            quantum_gate_param_type.quantum_gate_param_type.QUBIT_ID),
        pmt.PMT_NIL)
    if (pmt.eq(qubit_id_PMT, pmt.PMT_NIL)):
        return
    qubit_id = pmt.to_float(qubit_id_PMT)
    print("gate_params.qubit_id=" + str(qubit_id))
    if (gate_type == quantum_gate_type.quantum_gate_type.X):
        print("in msg_handler_analyzed_data_in X gate")
        # build the circuit: an X gate toggles the readout angle 0 <-> 180
        RO_STATE = self._qubit_stat_map[qubit_id]
        if (float(RO_STATE.angle) == 0.0):
            RO_STATE.angle = 180.0
        else:
            RO_STATE.angle = 0.0
        self._qubit_stat_map[qubit_id] = RO_STATE
    elif (gate_type == quantum_gate_type.quantum_gate_type.RO):
        print("in msg_handler_analyzed_data_in RO")
        # run the circuit: publish the simulated readout-start message
        RO_STATE = self._qubit_stat_map[qubit_id]
        SIM_msg = pmt.make_dict()
        SIM_msg = pmt.dict_add(
            SIM_msg,
            pmt.from_float(
                quantum_qubit_param_type.quantum_qubit_param_type.ID),
            pmt.from_float(qubit_id))
        SIM_msg = pmt.dict_add(
            SIM_msg,
            pmt.from_float(
                quantum_qubit_param_type.quantum_qubit_param_type.ANGLE),
            pmt.from_float(float(RO_STATE.angle)))
        SIM_msg = pmt.dict_add(
            SIM_msg,
            pmt.from_float(
                quantum_qubit_param_type.quantum_qubit_param_type.STATE),
            pmt.from_float(quantum_qubit_RO_state_type.
                           quantum_qubit_RO_state_type.START))
        self.message_port_pub(pmt.intern('simulated_data'), SIM_msg)
        RO_STATE.state = quantum_qubit_RO_state_type.quantum_qubit_RO_state_type.START
        self._qubit_stat_map[qubit_id] = RO_STATE
    self.unlock()
def _post_phase_inc_cmd(self, new_phase_inc, offset=None):
    """Post a phase-increment update command to the rotator block.

    Args:
        new_phase_inc: new phase increment (radians/sample).
        offset: optional absolute sample offset at which to apply it.
    """
    cmd = pmt.dict_add(pmt.make_dict(), pmt.intern("inc"),
                       pmt.from_double(new_phase_inc))
    if offset is not None:
        cmd = pmt.dict_add(cmd, pmt.intern("offset"),
                           pmt.from_uint64(offset))
    self.rotator_cc.insert_tail(pmt.to_pmt("cmd"), cmd)
def test_001_all_header_fields(self):
    """csv_reader with a typed header should convert every declared column
    type into the corresponding PMT value; untyped trailing columns become
    the u8 payload."""
    with open('/tmp/file.csv', 'w') as f:
        # write header: one column per supported type annotation
        f.write('field0(string), , field1(bool), field2(float),' +
                'field3(long), field4(uint64), field5(double),' +
                'field6(complex),field7,field8(time),field9(time_tuple)\n')
        # add some data
        f.write(
            'field0, empty, True, 1.0,1234567890,987654321, 2.5,1+2j,string,1.0,1.0,1,2,3,4,5\n'
        )
    # start reader/
    reader = csv_reader(fname='/tmp/file.csv', has_header=True, period=10,
                        start_delay=0, repeat=False)
    # expected pdu: one metadata entry per typed column
    metadata = pmt.dict_add(pmt.make_dict(), pmt.intern('field0'),
                            pmt.intern('field0'))
    metadata = pmt.dict_add(metadata, pmt.intern('field1'), pmt.from_bool(True))
    metadata = pmt.dict_add(metadata, pmt.intern('field2'), pmt.from_float(1.0))
    metadata = pmt.dict_add(metadata, pmt.intern('field3'),
                            pmt.from_long(1234567890))
    metadata = pmt.dict_add(metadata, pmt.intern('field4'),
                            pmt.from_uint64(987654321))
    metadata = pmt.dict_add(metadata, pmt.intern('field5'), pmt.from_double(2.5))
    metadata = pmt.dict_add(metadata, pmt.intern('field6'),
                            pmt.from_complex(1.0 + 2j))
    metadata = pmt.dict_add(metadata, pmt.intern('field7'), pmt.intern('string'))
    # time -> (uint64 . double) pair; time_tuple -> (uint64, double) tuple
    metadata = pmt.dict_add(
        metadata, pmt.intern('field8'),
        pmt.cons(pmt.from_uint64(1), pmt.from_double(0)))
    metadata = pmt.dict_add(
        metadata, pmt.intern('field9'),
        pmt.make_tuple(pmt.from_uint64(1), pmt.from_double(0)))
    data = pmt.init_u8vector(5, [1, 2, 3, 4, 5])
    expected = pmt.cons(metadata, data)
    # run
    self.tb.msg_connect((reader, 'out'), (self.debug, 'store'))
    self.tb.start()
    time.sleep(.5)
    self.tb.stop()
    self.tb.wait()
    got = self.debug.get_message(0)
    self.assertTrue(pmt.equal(expected, got))
def test_coerce(self):
    """COERCE mode should snap the center frequency (911 MHz + 400 kHz
    offset... actually 911e6 - 400e3 shifted input) onto the nearest entry of
    the provided channel list and de-rotate the samples accordingly."""
    samp_rate = 1e6
    freq_offset = -400e3
    center_freq = 911e6
    # blocks
    self.emitter = pdu_utils.message_emitter()
    self.cf = fhss_utils.cf_estimate(fhss_utils.COERCE,
                                     [x * 1e6 for x in range(900, 930)])
    self.debug = blocks.message_debug()
    # connections
    self.tb.msg_connect((self.emitter, 'msg'), (self.cf, 'in'))
    self.tb.msg_connect((self.cf, 'out'), (self.debug, 'store'))
    # data: DC input; expected output is the offset de-rotation of it
    in_data = (1 + 0j, ) * 2048
    i_vec = pmt.init_c32vector(len(in_data), in_data)
    out_data = np.exp(1j * np.arange(0, 2 * np.pi * (freq_offset / samp_rate * len(in_data)),
                                     2 * np.pi * (freq_offset / samp_rate),
                                     dtype=np.complex64))
    e_vec = pmt.init_c32vector(len(out_data), out_data.tolist(
    ))  # pmt doesn't play nice with numpy sometimes, convert to list
    meta = pmt.make_dict()
    meta = pmt.dict_add(meta, pmt.intern("sample_rate"), pmt.from_float(samp_rate))
    meta = pmt.dict_add(meta, pmt.intern("center_frequency"),
                        pmt.from_float(center_freq + freq_offset))
    in_pdu = pmt.cons(meta, i_vec)
    e_pdu = pmt.cons(meta, e_vec)
    # flowgraph
    self.tb.start()
    time.sleep(.001)
    self.emitter.emit(in_pdu)
    time.sleep(.01)
    self.tb.stop()
    self.tb.wait()
    # parse output
    #print "got ", list(pmt.to_python(pmt.cdr(self.debug.get_message(0))))
    #print "got ", self.debug.get_message(0)
    rcv = self.debug.get_message(0)
    rcv_meta = pmt.car(rcv)
    rcv_data = pmt.cdr(rcv)
    rcv_cf = pmt.dict_ref(rcv_meta, pmt.intern("center_frequency"), pmt.PMT_NIL)
    # asserts: samples de-rotated, frequency coerced onto the 911 MHz channel
    self.assertComplexTuplesAlmostEqual(
        tuple(pmt.c32vector_elements(rcv_data)), tuple(out_data), 2)
    self.assertTrue(pmt.equal(rcv_cf, pmt.from_float(911e6)))
def makeDict(self, **kwargs):
    """Build a PMT dict of USRP-style stream metadata from keyword args.

    Recognized keys: 'freq' -> rx_freq, 'rate' -> rx_rate, and the pair
    'epoch_int'/'epoch_frac' -> rx_time tuple. Unknown keys are ignored.
    """
    result = pmt.make_dict()
    if "freq" in kwargs:
        result = pmt.dict_add(result, pmt.intern("rx_freq"),
                              pmt.from_double(kwargs["freq"]))
    if "rate" in kwargs:
        result = pmt.dict_add(result, pmt.intern("rx_rate"),
                              pmt.from_double(kwargs["rate"]))
    if "epoch_int" in kwargs and "epoch_frac" in kwargs:
        stamp = pmt.make_tuple(pmt.from_uint64(kwargs["epoch_int"]),
                               pmt.from_double(kwargs["epoch_frac"]))
        result = pmt.dict_add(result, pmt.intern("rx_time"), stamp)
    return result
def work(self, input_items, output_items):
    """Accumulate averaged FFT rows into a spectrogram image; every n_drops
    completed images, run the detector and publish one message per box.

    Each input row is averaged avgsize times into the current image row;
    when nrows rows are filled an image is complete.
    """
    in0 = input_items[0]
    #out = output_items[0]
    for idx in range(in0.shape[0]):
        self.nsamplesread += len(in0[idx, :])
        # accumulate this FFT row into the current image row
        self.imgnp[self.count, :] += in0[idx, :]
        self.countavg += 1
        if self.countavg == self.avgsize:
            self.countavg = 0
            self.count += 1
            if self.count == self.nrows:
                # one full image accumulated
                self.count = 0
                self.dropcount += 1
                if self.dropcount >= self.n_drops:
                    self.dropcount = 0
                    Sxx = spectrogram.normalize_spectrogram(
                        self.imgnp)  # /self.avgsize)
                    if self.cancel_DCoffset:
                        pwr_min = np.min(Sxx)
                        # fix: integer (floor) division for the column index —
                        # plain `/` yields a float on Python 3 and breaks
                        # indexing; `//` is identical on Python 2.
                        Sxx[:, Sxx.shape[1] // 2] = pwr_min
                    # replicate the grayscale plane into a 3-channel image
                    self.imgcv[:, 0:self.vlen, 0] = np.uint8(Sxx * 255)
                    self.imgcv[:, :, 1] = self.imgcv[:, :, 0]
                    self.imgcv[:, :, 2] = self.imgcv[:, :, 0]
                    detected_boxes = self.classifier.classify(self.imgcv)
                    self.last_result = detected_boxes
                    # one message per detected box: tstamp + box fields
                    for box in detected_boxes:
                        d = pmt.make_dict()
                        d = pmt.dict_add(d, pmt.intern('tstamp'),
                                         pmt.from_long(self.img_tstamp))
                        for k, v in box.items():
                            if k == 'topleft' or k == 'bottomright':
                                pmt_val = pmt.make_dict()
                                pmt_val = pmt.dict_add(pmt_val, pmt.intern('x'),
                                                       pmt.from_long(v['x']))
                                pmt_val = pmt.dict_add(pmt_val, pmt.intern('y'),
                                                       pmt.from_long(v['y']))
                            elif k == 'confidence':
                                pmt_val = pmt.from_float(float(v))
                            elif k == 'label':
                                pmt_val = pmt.string_to_symbol(v)
                            else:
                                raise NotImplementedError(
                                    'Did not expect parameter {}'.format(k))
                            d = pmt.dict_add(d, pmt.intern(k), pmt_val)
                        self.message_port_pub(pmt.intern('msg_out'), d)
                # reset the accumulator and advance the image timestamp
                self.imgnp[:] = 0
                self.img_tstamp += int(self.vlen * self.nrows)
    return len(input_items[0])
def msg_handler_doppler(self, msg_in):
    """Estimate Doppler from the PDU's IQ payload and publish a result dict
    with 'success' and 'doppler' entries."""
    iq_samples = pmt.to_python(pmt.cdr(msg_in))
    success, doppler = self._obj.get_doppler(iq_samples)
    msg_out = pmt.make_dict()
    # fix: np.bool was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin bool() is equivalent here.
    msg_out = pmt.dict_add(msg_out, pmt.intern('success'),
                           pmt.to_pmt(bool(success)))
    msg_out = pmt.dict_add(msg_out, pmt.intern('doppler'),
                           pmt.to_pmt(doppler))
    self.message_port_pub(self._port_doppler, msg_out)
def send_frame(self, timestamp, center_frequency, confidence):
    """Build a minimal two-byte frame PDU with timestamp, center_frequency
    and confidence metadata, track the largest timestamp seen, and post the
    frame to the sorter's 'pdus' port."""
    meta = pmt.make_dict()
    for key, value in (('timestamp', pmt.from_uint64(timestamp)),
                       ('center_frequency', pmt.from_float(center_frequency)),
                       ('confidence', pmt.from_long(confidence))):
        meta = pmt.dict_add(meta, pmt.intern(key), value)
    frame = pmt.cons(meta, pmt.init_u8vector(2, range(2)))
    # remember the newest timestamp posted so far
    if timestamp > self.max_timestamp:
        self.max_timestamp = timestamp
    self.sorter.to_basic_block()._post(pmt.intern("pdus"), frame)
def setUp (self):
    """Build the test flowgraph (emitter -> extract_metadata -> debug) and a
    reusable base metadata dict with string, uint64 and double entries."""
    self.tb = gr.top_block ()
    self.emitter = pdu_utils.message_emitter()
    self.emd = pdu_utils.extract_metadata(pmt.intern("test_key"), 1, 0)
    self.debug = blocks.message_debug()
    self.tb.msg_connect((self.emitter, 'msg'), (self.emd, 'dict'))
    self.tb.msg_connect((self.emd, 'msg'), (self.debug, 'store'))
    base = pmt.make_dict()
    for key, value in (
            (pmt.intern("key1"), pmt.intern("value1")),
            (pmt.intern("key2"), pmt.intern("value2")),
            (pmt.intern("uint64_key"), pmt.from_uint64(1234567)),
            (pmt.intern("double_key"), pmt.from_double(1.234567))):
        base = pmt.dict_add(base, key, value)
    self.base_dict = base
def test19(self):
    """INT32 extremes must survive a dict serialize/deserialize round trip
    both as PMTs and after conversion back to Python."""
    d = pmt.make_dict()
    d = pmt.dict_add(d, pmt.intern("MAX"), pmt.from_long(self.MAXINT32))
    d = pmt.dict_add(d, pmt.intern("MIN"), pmt.from_long(self.MININT32))
    round_tripped = pmt.deserialize_str(pmt.serialize_str(d))
    self.assertTrue(pmt.equal(d, round_tripped))
    as_python = pmt.to_python(round_tripped)
    self.assertEqual(self.MAXINT32, as_python["MAX"])
    self.assertEqual(self.MININT32, as_python["MIN"])
def handle_msg(self, msg):
    """Scale the packet-length metadata entry from bytes to bits, then
    republish the message on the output port."""
    length_key = pmt.intern(self.len_tag_key)
    if pmt.dict_has_key(msg, length_key):
        packet_len = pmt.to_python(msg)[self.len_tag_key]
        # replace the entry with its value multiplied by 8
        msg = pmt.dict_delete(msg, length_key)
        msg = pmt.dict_add(msg, length_key, pmt.from_long(packet_len * 8))
    self.message_port_pub(self.msg_buf_out, msg)
def set_value(self, value, conversion = pmt.from_double ):
    """Publish a single-entry PMT dict {self._key: conversion(value)} on the
    output message port.

    Args:
        value: the value to send.
        conversion: PMT conversion function applied to value
            (default pmt.from_double).
    """
    payload = pmt.dict_add(pmt.make_dict(), self._key, conversion(value))
    self.message_port_pub(self._out_port, payload)
def pdu_arg_add(pdu, k, v):
    """Return a copy of pdu with k -> v added to its metadata dict.

    A NIL metadata field is promoted to an empty dict first.
    """
    meta, data = pmt.car(pdu), pmt.cdr(pdu)
    if pmt.is_null(meta):
        meta = pmt.make_dict()
    assert pmt.is_dict(meta)
    return pmt.cons(pmt.dict_add(meta, k, v), data)
def handler(self, pdu):
    """Estimate the average power (dB, offset by self.k) of the PDU payload
    and republish the metadata — with a 'power' entry added — and no data."""
    samples = pmt.to_python(pmt.cdr(pdu))
    meta = pmt.car(pdu)
    samples = samples - numpy.mean(samples)  # remove DC
    # average magnitude-squared of the DC-free samples
    mean_sq = numpy.mean(numpy.real(samples * numpy.conj(samples)))
    power_db = self.k + 10 * numpy.log10(mean_sq)
    meta = pmt.dict_add(meta, pmt.intern("power"), pmt.from_float(power_db))
    # data vector intentionally dropped for now; only metadata is forwarded
    self.message_port_pub(pmt.intern("cpdus"), pmt.cons(meta, pmt.PMT_NIL))
def handler(self, msg):
    """Estimate the burst end from smoothed instantaneous power, trim the
    burst there, and republish it with est_len/orig_len metadata."""
    meta = pmt.car(msg)
    samples = pmt.cdr(msg)
    x = numpy.array(pmt.c32vector_elements(samples), dtype=numpy.complex64)
    x_2 = numpy.real(x * x.conjugate())
    # smoothing filter (50-sample boxcar)
    x_2f = numpy.convolve(50*[1], x_2)
    # find max power to compute power thresh
    maxidx = numpy.argmax(x_2f)
    maxpow = x_2f[maxidx]
    thr = maxpow / 16  # 6db down
    # find where we are below thresh
    start_offset = 1000
    # NOTE(review): start_offset is effectively added twice (named + literal
    # 1000), so the `idx == start_offset` guard below can never fire — verify
    # whether `+ 1000` should be removed.
    idx = numpy.argmax(x_2f[start_offset:] < thr) + start_offset + 1000
    # print "below = (%d, %f)"%(idx, x_2f[idx])
    # discard bursts where we dont find an end
    if idx == start_offset:
        # fix: converted Python-2 print statement to the print() function
        print("WARNING: length detect: discarding burst")
        return
    # tack on some metadata
    meta = pmt.dict_add(meta, pmt.intern("est_len"), pmt.from_double(int(idx)))
    meta = pmt.dict_add(meta, pmt.intern("orig_len"), pmt.from_double(len(x)))
    # extract the useful signal
    x = x[0:idx]
    # send it on its way; list() keeps this working on Python 3 where map()
    # returns a lazy iterator (a no-op on Python 2)
    samples_out = pmt.init_c32vector(len(x), list(map(lambda i: complex(i), x)))
    cpdu = pmt.cons(meta, samples_out)
    self.message_port_pub(pmt.intern("cpdus"), cpdu)
def handler(self,msg):
    """
    Annotate a burst PDU with packet-id tags from a pcap capture.

    The metadata's 'start'/'end' sample counts are converted to seconds via
    self.fs; every pcap packet whose timestamp falls in that window yields a
    (sample_offset, "packet_id", "<index>") tuple. The tuples are attached
    to the metadata under key "tags" and the PDU is republished on "pdus".
    """
    meta = pmt.car(msg)
    meta_py = pmt.to_python(meta)
    t_start = meta_py['start'] / self.fs
    t_stop = meta_py['end'] / self.fs
    tag_times = []
    # re-open the capture each time so we always scan from the beginning
    self.pcap = PcapReader(self.f)
    for pkt_idx, pkt in enumerate(self.pcap):
        if pkt.time <= t_start:
            continue
        if pkt.time > t_stop:
            break
        samp_offset = int(self.fs * (pkt.time - t_start))
        tag_times.append((samp_offset, "packet_id", "%d" % (pkt_idx)))
    meta = pmt.dict_add(meta, pmt.intern("tags"), pmt.to_pmt(tag_times))
    self.message_port_pub(pmt.intern("pdus"), pmt.cons(meta, pmt.cdr(msg)))
def update_timestamp(hdr, seg_size):
    """
    Advance a file-metadata header's "rx_time" by seg_size samples.

    Reads the (seconds, fractional-seconds) tuple under "rx_time" and the
    sample rate under "rx_rate", adds seg_size/rate to the timestamp, and
    returns a new header dict with the updated "rx_time". Exits the process
    if "rx_time" is missing (corrupt header file).
    """
    if pmt.dict_has_key(hdr, pmt.string_to_symbol("rx_time")):
        r = pmt.dict_ref(hdr, pmt.string_to_symbol("rx_time"), pmt.PMT_NIL)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
    else:
        # BUGFIX: message previously referred to key 'time'; the key actually
        # looked up is 'rx_time'.
        sys.stderr.write("Could not find key 'rx_time': \
invalid or corrupt data file.\n")
        sys.exit(1)
    new_hdr = pmt.dict_delete(hdr, pmt.intern("rx_time"))
    if pmt.dict_has_key(hdr, pmt.intern("rx_rate")):
        r = pmt.dict_ref(hdr, pmt.intern("rx_rate"), pmt.PMT_NIL)
        rate = pmt.to_double(r)
        new_t = t + float(seg_size)/rate
        new_secs = long(new_t)
        new_fracs = new_t - new_secs
        time_val = pmt.make_tuple(pmt.from_uint64(new_secs),
                                  pmt.from_double(new_fracs))
        new_hdr = pmt.dict_add(new_hdr, pmt.intern("rx_time"), time_val)
    return new_hdr
def test_001_t (self):
    """Strobe three different bit-pattern PDUs into a bit_sniffer block."""
    meta = pmt.dict_add(pmt.make_dict(), pmt.to_pmt("freq"), pmt.to_pmt("val"))
    first_bits = [0x01, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00] * 4
    first_msg = pmt.cons(meta, pmt.init_u8vector(len(first_bits), first_bits))
    src = blocks.message_strobe(first_msg, 10)
    dut = capture_tools.bit_sniffer(fade_out=500, hexadecimal=True)
    self.tb.msg_connect((src, "strobe"), (dut, "packets"))
    self.tb.start()
    time.sleep(5)
    # swap in two further patterns, letting each run for a while
    follow_ups = [
        [0x01, 0x00, 0x01, 0x00, 0x01, 0x00],
        [0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x01],
    ]
    for bits in follow_ups:
        src.set_msg(pmt.cons(meta, pmt.init_u8vector(len(bits), bits)))
        time.sleep(5)
    self.tb.stop()
def rx_handler(self, msg):
    # Handle an incoming ARQ PDU: dispatch on the 8-bit opcode at the head of
    # the payload. Data packets are ACKed, deduplicated, and forwarded to the
    # upper layer; ACK packets mark our outstanding packet acknowledged.
    meta = pmt.car(msg);
    data_in = array.array('B', pmt.u8vector_elements(pmt.cdr(msg)))
    data_list = data_in.tolist();
    if data_list[0:8] == self.operations["data"]:
        self.rx_cnt = self.rx_cnt + 1;
        # the 32 bits after the opcode are the little-endian sequence number
        seq = struct.unpack("<i", self.pack_bits(data_list[8:8+32]))[0];
        #print "rx sequence: %d"%(seq);
        # send ACK back
        self.send_ack(seq);
        # pass along sequence number for fun
        if(pmt.is_null(meta)):
            meta = pmt.make_dict();
        meta = pmt.dict_add(meta, pmt.intern("arq_seq"), pmt.from_long(seq));
        # drop duplicates (we already delivered this sequence number)
        if(self.rx_record.has_key(seq)):
            print "duplicate recieve data pkt seq! %d"%(seq)
            return;
        self.rx_record[seq] = time.time();
        # send payload (everything after opcode + seq) to next layer
        data_upper = data_list[8+32:];
        self.message_port_pub(pmt.intern("rx_out"),
            pmt.cons(meta,pmt.init_u8vector(len(data_upper),data_upper)));
        return;
    if data_list[0:8] == self.operations["ack"]:
        self.ack_rx_cnt = self.ack_rx_cnt + 1;
        # set pkt ack'd locally
        seq = struct.unpack("<i", self.pack_bits(data_list[8:8+32]))[0];
        print "got pkt ack (%d)"%(seq)
        self.ack(seq);
        return;
    # fall through fail: opcode matched neither "data" nor "ack"
    print "got invalid ARQ header, discarding!"
def rx_from_tap(self, msg):
    """
    Forward a PDU from the TAP interface to the MAC layer, stamping the
    destination address (self.dst_addr, a byte sequence) into the metadata
    under key 'dst_addr'.
    """
    meta = pmt.car(msg)
    # Consistency fix: incoming metadata may be PMT_NIL (same guard as the
    # other handlers in this file); dict_add needs a dict to extend.
    if pmt.is_null(meta):
        meta = pmt.make_dict()
    meta = pmt.dict_add(meta, pmt.intern('dst_addr'),
                        pmt.init_u8vector(len(self.dst_addr), self.dst_addr))
    self.message_port_pub(pmt.intern('to_mac'), pmt.cons(meta, pmt.cdr(msg)))
def handler(self, msg):
    # Feed-forward burst synchronizer: estimates symbol timing (tau),
    # carrier frequency offset (f_off) and static phase (theta) for a PSK
    # burst, publishing intermediate products on "timing_metric",
    # "sym_timed" and "phase_ramp", and the corrected symbols on "cpdus".
    # get input
    meta = pmt.car(msg);
    samples = pmt.cdr(msg);
    x = numpy.array(pmt.c32vector_elements(samples), dtype=numpy.complex64)
    # upsample and normalize power
    xi = signal.resample(x, len(x)* (self.N / self.T));
    # compute the symbol timing
    xt = numpy.real(xi*xi.conjugate()) * numpy.exp( (-1j*2.0*numpy.pi/self.N) * numpy.linspace(1,len(xi),len(xi)) );
    # NOTE(review): the sum below runs over the raw input x, not the timing
    # metric xt computed just above — confirm this is intentional.
    s = numpy.sum(x);
    tau = (-self.T/(2*numpy.pi)) * numpy.arctan2(numpy.imag(s), numpy.real(s));
    # publish timing metric for debugging
    tm = pmt.init_c32vector(xt.size, map(lambda i: complex(i), xt))
    tm_cpdu = pmt.cons(meta,tm)
    self.message_port_pub(pmt.intern("timing_metric"), tm_cpdu);
    # extract symbols at the estimated timing offset (one per N samples)
    offset = round(self.N*tau/self.T);
    fo = (offset + self.N)%self.N;
    sym = xi[fo:-1:self.N];
    # normalize power to 1
    sym = sym / numpy.mean(numpy.real(sym * sym.conjugate()));
    # publish timing correct symbols (with frequency offset)
    sm = pmt.init_c32vector(sym.size, map(lambda i: complex(i), sym))
    sm_cpdu = pmt.cons(meta,sm)
    self.message_port_pub(pmt.intern("sym_timed"), sm_cpdu);
    # compute symbol frequency offset (linear phase offset within block):
    # raising to the O-th power removes the modulation, leaving a phase ramp
    x_n = numpy.power(sym[200:1000], self.O);
    phase_ramp = numpy.unwrap(numpy.angle( x_n ));
    f_off_O = numpy.mean(numpy.diff(phase_ramp));
    goodstat = numpy.std(numpy.diff(phase_ramp));
    f_off = f_off_O / self.O;
    # check percentages: track how many bursts pass the goodness test
    self.nburst = self.nburst + 1;
    if(goodstat < 1.0):
        self.nburst_ok = self.nburst_ok + 1;
    else:
        print "WARNING: feedforward synchronizer discarding burst, goodness metric %f < 1.0 (likely poor timing recovery occurred, the CFO phase ramp looks like garbage)"%(goodstat)
        return
    print "sync: "+str((goodstat, self.nburst, self.nburst_ok, self.nburst_ok*100.0 / self.nburst));
    # export phase ramp
    pr = pmt.init_f32vector(phase_ramp.size, map(lambda i: float(i), phase_ramp))
    pr_fpdu = pmt.cons(meta,pr)
    self.message_port_pub(pmt.intern("phase_ramp"), pr_fpdu);
    # apply frequency offset correction
    xc = numpy.multiply(sym, numpy.exp(-1j * f_off * numpy.linspace(1,sym.size,sym.size)));
    # compute and correct static symbol phase offset
    xcp = numpy.power(xc[400:1000], self.O);
    # linear mean
    theta = numpy.mean( numpy.angle( xcp ) ) / self.O + numpy.pi/4;
    # weighted mean
    # theta = numpy.sum(numpy.angle(xcp) * numpy.abs(xcp)) / numpy.sum(numpy.abs(xcp));
    # theta = theta / self.O + numpy.pi/4;
    xc = xc * numpy.exp(-1j*theta);
    # show time, frequency and phase estimates
    #print "tau = %f, f_off = %f, theta = %f"%(tau, f_off, theta);
    # add our estimates to the metadata dictionary
    meta = pmt.dict_add(meta, pmt.intern("tau"), pmt.from_double(tau));
    meta = pmt.dict_add(meta, pmt.intern("f_off"), pmt.from_double(f_off));
    meta = pmt.dict_add(meta, pmt.intern("theta"), pmt.from_double(theta));
    # publish freq corrected symbols
    xcm = pmt.init_c32vector(xc.size, map(lambda i: complex(i), xc))
    xcm_cpdu = pmt.cons(meta,xcm)
    self.message_port_pub(pmt.intern("cpdus"), xcm_cpdu);
def __init__(self):
    # GRC-style generated top block: Qt check boxes / sliders controlling a
    # capture_tools.bit_sniffer that is fed by a repeating message strobe.
    gr.top_block.__init__(self, "Top Block")
    Qt.QWidget.__init__(self)
    self.setWindowTitle("Top Block")
    try:
        self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
    except:
        pass
    # standard GRC scroll-area scaffolding
    self.top_scroll_layout = Qt.QVBoxLayout()
    self.setLayout(self.top_scroll_layout)
    self.top_scroll = Qt.QScrollArea()
    self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
    self.top_scroll_layout.addWidget(self.top_scroll)
    self.top_scroll.setWidgetResizable(True)
    self.top_widget = Qt.QWidget()
    self.top_scroll.setWidget(self.top_widget)
    self.top_layout = Qt.QVBoxLayout(self.top_widget)
    self.top_grid_layout = Qt.QGridLayout()
    self.top_layout.addLayout(self.top_grid_layout)
    # restore the last saved window geometry
    self.settings = Qt.QSettings("GNU Radio", "top_block")
    self.restoreGeometry(self.settings.value("geometry").toByteArray())

    ##################################################
    # Variables
    ##################################################
    self.scroll = scroll = False
    self.samp_rate = samp_rate = 32000
    self.offset = offset = 0
    self.hex_output = hex_output = False
    self.bits_per_word = bits_per_word = 8
    self.binary_output = binary_output = True
    self.ascii_output = ascii_output = False

    ##################################################
    # Blocks
    ##################################################
    # check box toggling 'scroll'
    _scroll_check_box = Qt.QCheckBox("scroll")
    self._scroll_choices = {True: True, False: False}
    self._scroll_choices_inv = dict((v,k) for k,v in self._scroll_choices.iteritems())
    self._scroll_callback = lambda i: Qt.QMetaObject.invokeMethod(_scroll_check_box, "setChecked", Qt.Q_ARG("bool", self._scroll_choices_inv[i]))
    self._scroll_callback(self.scroll)
    _scroll_check_box.stateChanged.connect(lambda i: self.set_scroll(self._scroll_choices[bool(i)]))
    self.top_layout.addWidget(_scroll_check_box)
    # slider for 'offset' (0..7)
    self._offset_range = Range(0, 7, 1, 0, 200)
    self._offset_win = RangeWidget(self._offset_range, self.set_offset, "offset", "counter_slider", int)
    self.top_layout.addWidget(self._offset_win)
    # check box toggling 'hex_output'
    _hex_output_check_box = Qt.QCheckBox("hex_output")
    self._hex_output_choices = {True: True, False: False}
    self._hex_output_choices_inv = dict((v,k) for k,v in self._hex_output_choices.iteritems())
    self._hex_output_callback = lambda i: Qt.QMetaObject.invokeMethod(_hex_output_check_box, "setChecked", Qt.Q_ARG("bool", self._hex_output_choices_inv[i]))
    self._hex_output_callback(self.hex_output)
    _hex_output_check_box.stateChanged.connect(lambda i: self.set_hex_output(self._hex_output_choices[bool(i)]))
    self.top_layout.addWidget(_hex_output_check_box)
    # slider for 'bits_per_word' (2..8)
    self._bits_per_word_range = Range(2, 8, 1, 8, 200)
    self._bits_per_word_win = RangeWidget(self._bits_per_word_range, self.set_bits_per_word, "bits_per_word", "counter_slider", int)
    self.top_layout.addWidget(self._bits_per_word_win)
    # check box toggling 'binary_output'
    _binary_output_check_box = Qt.QCheckBox("binary_output")
    self._binary_output_choices = {True: True, False: False}
    self._binary_output_choices_inv = dict((v,k) for k,v in self._binary_output_choices.iteritems())
    self._binary_output_callback = lambda i: Qt.QMetaObject.invokeMethod(_binary_output_check_box, "setChecked", Qt.Q_ARG("bool", self._binary_output_choices_inv[i]))
    self._binary_output_callback(self.binary_output)
    _binary_output_check_box.stateChanged.connect(lambda i: self.set_binary_output(self._binary_output_choices[bool(i)]))
    self.top_layout.addWidget(_binary_output_check_box)
    # check box toggling 'ascii_output'
    _ascii_output_check_box = Qt.QCheckBox("ascii_output")
    self._ascii_output_choices = {True: True, False: False}
    self._ascii_output_choices_inv = dict((v,k) for k,v in self._ascii_output_choices.iteritems())
    self._ascii_output_callback = lambda i: Qt.QMetaObject.invokeMethod(_ascii_output_check_box, "setChecked", Qt.Q_ARG("bool", self._ascii_output_choices_inv[i]))
    self._ascii_output_callback(self.ascii_output)
    _ascii_output_check_box.stateChanged.connect(lambda i: self.set_ascii_output(self._ascii_output_choices[bool(i)]))
    self.top_layout.addWidget(_ascii_output_check_box)
    # the device under control, wired to the GUI variables above
    self.capture_tools_bit_sniffer_0 = capture_tools.bit_sniffer(200, hex_output, offset,
        bits_per_word, False, False, ascii_output, binary_output, 0, scroll)
    # build a fixed test PDU and strobe it every 10 ms
    msg_meta = pmt.make_dict()
    msg_meta = pmt.dict_add(msg_meta, pmt.to_pmt("freq"), pmt.to_pmt("val"))
    vec1 = [0x01, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00,0x01, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00,0x01, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00,0x01, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00];
    msg_vector = pmt.init_u8vector(len(vec1), vec1)
    msg = pmt.cons(msg_meta, msg_vector)
    self.blocks_message_strobe_0 = blocks.message_strobe(msg, 10)

    ##################################################
    # Connections
    ##################################################
    self.msg_connect((self.blocks_message_strobe_0, 'strobe'), (self.capture_tools_bit_sniffer_0, 'packets'))
def post_message(self):
    """
    Publish the current scenario description on the "scenario" port as a
    PMT dict with keys: scenario_number (long), scenario_channels (list of
    bool occupancy flags) and scenario_tau (send-window length, long).
    """
    scenario_dict = pmt.make_dict()
    scenario_dict = pmt.dict_add(scenario_dict,
                                 pmt.intern("scenario_number"),
                                 pmt.from_long(long(self.scenario)))
    occupancy = [bool(c) for c in self.chan_occupied]
    scenario_dict = pmt.dict_add(scenario_dict,
                                 pmt.intern("scenario_channels"),
                                 pmt.to_pmt(occupancy))
    tau = long(self.scenario_send_window[self.scenario])
    scenario_dict = pmt.dict_add(scenario_dict,
                                 pmt.intern("scenario_tau"),
                                 pmt.from_long(tau))
    self.message_port_pub(pmt.intern("scenario"), scenario_dict)
def propagate_headers(options, args):
    """Propagate .hdr metadata from an input capture file to an extracted
    subsection of it.

    Walks the input header chain to the segment containing options.start,
    interpolates a new rx_time for the first output segment, rewrites
    size/type/bytes for the requested output type, and writes adjusted
    headers until options.start + sample_len is covered. Exits on I/O or
    format errors. Returns a summary dict of the operation.
    """
    infile = args[0]
    outfile = args[1]
    infile_hdr = infile + ".hdr"
    outfile_hdr = outfile + ".hdr"
    sample_cnt_end = 0
    sample_offset = long(options.start)
    # Open input header
    try:
        handle_in = open(infile_hdr, "rb")
    except IOError:
        sys.stderr.write("Unable to open input file header\n")
        sys.exit(1)
    # Open output header
    try:
        handle_out = open(outfile_hdr, "wb")
    except IOError:
        sys.stderr.write("Unable to open output file header\n")
        sys.exit(1)
    # Read first header separately to get file type
    hdr_in, hdr_extra_in, handle_in = read_single_header(handle_in)
    info_in = parse_file_metadata.parse_header(hdr_in, False)
    sample_cnt_end += info_in["nitems"]
    # Parse file type - ensure support for it
    shortname_intype = find_shortname(info_in["cplx"], info_in["type"],
                                      info_in["size"])
    if shortname_intype == SNAME_TO_ENUM["unknown"]:
        sys.stderr.write("Unsupported data type\n")
        sys.exit(1)
    if options.output_type == "unknown":
        shortname_outtype = shortname_intype
    else:
        shortname_outtype = SNAME_TO_ENUM[options.output_type]
    # Calc sample_len from file size if not specified
    if options.nsamples is not None:
        sample_len = long(options.nsamples)
    else:
        sample_len = os.path.getsize(infile) / SNAME_DEFS[shortname_intype][0]
    final_index = sample_offset + sample_len
    # Search input headers until we find the correct one
    while sample_cnt_end <= sample_offset:
        hdr_in, hdr_extra_in, handle_in = read_single_header(handle_in)
        info_in = parse_file_metadata.parse_header(hdr_in, False)
        sample_cnt_end += info_in["nitems"]
    time_in = info_in["rx_time"]
    # Starting sample of current segment
    sample_cnt_start = sample_cnt_end - info_in["nitems"]
    # Interpolate new timestamp
    delta = sample_offset - sample_cnt_start
    new_ts = time_in + delta / info_in["rx_rate"]
    # Calc new segment size (samples)
    if sample_cnt_end > final_index:
        first_seg_len = final_index - sample_offset
    else:
        first_seg_len = sample_cnt_end - sample_offset
    # Write the first output header
    hdr_out = hdr_in
    new_secs = long(new_ts)
    new_fracs = new_ts - new_secs
    time_val = pmt.make_tuple(pmt.from_uint64(new_secs),
                              pmt.from_double(new_fracs))
    size_val = pmt.from_long(SNAME_DEFS[shortname_outtype][0])
    bytes_val = pmt.from_uint64(first_seg_len * SNAME_DEFS[shortname_outtype][0])
    type_val = pmt.from_long(SNAME_DEFS[shortname_outtype][2])
    hdr_out = pmt.dict_add(hdr_out, pmt.intern("rx_time"), time_val)
    hdr_out = pmt.dict_add(hdr_out, pmt.intern("bytes"), bytes_val)
    hdr_out = pmt.dict_add(hdr_out, pmt.intern("type"), type_val)
    hdr_out = pmt.dict_add(hdr_out, pmt.intern("size"), size_val)
    hdr_out_str = pmt.serialize_str(hdr_out) + pmt.serialize_str(hdr_extra_in)
    handle_out.write(hdr_out_str)
    # Continue reading headers, modifying, and writing
    last_seg_len = info_in["nitems"]
    print "sample_cnt_end=%d,final_index=%d" % (sample_cnt_end, final_index)
    # Iterate through remaining headers
    while sample_cnt_end < final_index:
        hdr_in, hdr_extra_in, handle_in = read_single_header(handle_in)
        info_in = parse_file_metadata.parse_header(hdr_in, False)
        nitems = info_in["nitems"]
        sample_cnt_start = sample_cnt_end
        sample_cnt_end += nitems
        hdr_out = hdr_in
        # For last header, adjust segment length accordingly
        if sample_cnt_end > final_index:
            last_seg_len = final_index - sample_cnt_start
        else:
            last_seg_len = nitems
        size_val = pmt.from_long(SNAME_DEFS[shortname_outtype][0])
        bytes_val = pmt.from_uint64(last_seg_len * SNAME_DEFS[shortname_outtype][0])
        type_val = pmt.from_long(SNAME_DEFS[shortname_outtype][2])
        hdr_out = pmt.dict_add(hdr_out, pmt.intern("bytes"), bytes_val)
        hdr_out = pmt.dict_add(hdr_out, pmt.intern("type"), type_val)
        hdr_out = pmt.dict_add(hdr_out, pmt.intern("size"), size_val)
        hdr_out_str = pmt.serialize_str(hdr_out) + pmt.serialize_str(hdr_extra_in)
        handle_out.write(hdr_out_str)
    if options.verbose:
        print "Input File:" + infile
        print "Input Header:" + infile_hdr
        print "Input Type:" + ENUM_TO_SNAME[shortname_intype]
        print "Output File:" + outfile
        print "Output File Length (Samples):%d" % (final_index - sample_offset)
        print "Output Header:" + outfile_hdr
        print "File subsection: [%d,%d]" % (sample_offset, final_index)
        print "Output Type:" + ENUM_TO_SNAME[shortname_outtype]
        print "First Segment Length: %e samples" % first_seg_len
        print "Last Segment Length: %e samples" % last_seg_len
        print "delta=%f,new ts=%f" % (delta, new_ts)
    # Clean up
    handle_in.close()
    handle_out.close()
    # Return header info
    return {
        "infile": infile,
        "intype": shortname_intype,
        "outfile": outfile,
        "outtype": shortname_outtype,
        "sample_offset": sample_offset,
        "sample_len": sample_len,
    }
def handler(self, msg):
    # Blind baud-rate estimator + symbol recovery: autocorrelation locates
    # the samples-per-symbol; a timing-offset sweep picks the best sampling
    # phase; recovered symbols are published on "pdus" (debug products on
    # "autocorr" and "timing").
    # get input
    meta = pmt.car(msg);
    x = pmt.to_python(pmt.cdr(msg))
    if( self.sps == None and len(self.sps_samps) < 10):
        # compute the cross correlation metric first peak (to find baud rate)
        (clen, ncut) = (500,500)
        e = numpy.zeros(clen*2-1)
        for i in range(1,ncut):
            c = x[i:i+clen]
            d = numpy.correlate(c,c, mode='full')
            e += d
        # upsample to xcorr to interpolate fractional sym rate
        e = e[clen-1:]
        e_upsamp = signal.resample(e, self.upsample_rate*len(e))
        e_upsamp = e_upsamp[:len(e_upsamp)/2]
        #e_upsamp = e_upsamp[0:len(e_upsamp)/2]
        self.message_port_pub(pmt.intern("autocorr"), pmt.cons(meta, pmt.to_pmt(e_upsamp)))
        # locate first minimum and next peak (need to see how generalizable this is ... )
        firstmin = numpy.argmin(e_upsamp)
        firstmax = firstmin + numpy.argmax(e_upsamp[firstmin:])
        # determine samples per symbol
        sps = firstmax/self.upsample_rate
        self.sps_samps.append(sps)
        self.sps = numpy.mean(self.sps_samps)
    else:
        sps = self.sps
    # NOTE(review): key "meta" looks like it was meant to be "sps" — confirm
    # what downstream consumers expect before renaming.
    meta = pmt.dict_add(meta, pmt.intern("meta"), pmt.from_double(sps));
    print "sps = %f"%(sps)
    ovf = []
    ovals = {}
    n_offsets = float(self.n_offsets)
    nsyms = (len(x)/sps)-1
    best = 0
    best_syms = None
    # sweep candidate timing offsets; score each by mean symbol magnitude
    for o in numpy.arange(0,sps,sps/n_offsets):
        syms = signal.resample(x[o:o+nsyms*sps], len(x[o:])*10/(sps))
        syms = syms[0:len(syms)-len(syms)%10]
        syms = syms.reshape( [len(syms)/10, 10] )
        #syms = syms[:,4]
        # average the middle of each 10-sample symbol window
        syms = syms[:,3:7].mean(1)
        dist = numpy.mean(numpy.abs(syms))
        if dist > best:
            best = dist
            best_syms = syms
        ovals[o] = dist
        ovf.append(dist)
    # output timing metric (should look sinusoidal-ish)
    self.message_port_pub(pmt.intern("timing"), pmt.cons(meta, pmt.to_pmt(ovf)))
    # py2: keys() and values() iterate in matching order
    best_offset = ovals.keys()[ numpy.argmax(ovals.values()) ]
    meta = pmt.dict_add(meta, pmt.intern("tau"), pmt.from_double(best_offset));
    # publish our recovered symbols
    self.message_port_pub(pmt.intern("pdus"), pmt.cons(meta, pmt.to_pmt(best_syms)))
def test_001(self):
    """Round-trip QA for file_meta_sink / file_meta_source: write a sine
    with an extras dict, re-read it, and check header fields, stream tags
    and sample data all survive."""
    N = 1000
    outfile = "test_out.dat"
    detached = False
    samp_rate = 200000
    # extras dict carried alongside the standard header
    key = pmt.intern("samp_rate")
    val = pmt.from_double(samp_rate)
    extras = pmt.make_dict()
    extras = pmt.dict_add(extras, key, val)
    extras_str = pmt.serialize_str(extras)
    data = sig_source_c(samp_rate, 1000, 1, N)
    src = blocks.vector_source_c(data)
    fsnk = blocks.file_meta_sink(gr.sizeof_gr_complex, outfile,
                                 samp_rate, 1,
                                 blocks.GR_FILE_FLOAT, True,
                                 1000000, extras_str, detached)
    fsnk.set_unbuffered(True)
    self.tb.connect(src, fsnk)
    self.tb.run()
    fsnk.close()
    # parse the written header + extras back out of the file
    handle = open(outfile, "rb")
    header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
    if(len(header_str) == 0):
        self.assertFalse()
    try:
        header = pmt.deserialize_str(header_str)
    except RuntimeError:
        self.assertFalse()
    info = parse_file_metadata.parse_header(header, False)
    extra_str = handle.read(info["extra_len"])
    self.assertEqual(len(extra_str) > 0, True)
    handle.close()
    try:
        extra = pmt.deserialize_str(extra_str)
    except RuntimeError:
        self.assertFalse()
    extra_info = parse_file_metadata.parse_extra_dict(extra, info, False)
    self.assertEqual(info['rx_rate'], samp_rate)
    self.assertEqual(pmt.to_double(extra_info['samp_rate']), samp_rate)
    # Test file metadata source
    src.rewind()
    fsrc = blocks.file_meta_source(outfile, False)
    vsnk = blocks.vector_sink_c()
    tsnk = blocks.tag_debug(gr.sizeof_gr_complex, "QA")
    ssnk = blocks.vector_sink_c()
    self.tb.disconnect(src, fsnk)
    self.tb.connect(fsrc, vsnk)
    self.tb.connect(fsrc, tsnk)
    self.tb.connect(src, ssnk)
    self.tb.run()
    fsrc.close()
    # Test to make sure tags with 'samp_rate' and 'rx_rate' keys
    # were generated and received correctly.
    tags = tsnk.current_tags()
    for t in tags:
        if(pmt.eq(t.key, pmt.intern("samp_rate"))):
            self.assertEqual(pmt.to_double(t.value), samp_rate)
        elif(pmt.eq(t.key, pmt.intern("rx_rate"))):
            self.assertEqual(pmt.to_double(t.value), samp_rate)
    # Test that the data portion was extracted and received correctly.
    self.assertComplexTuplesAlmostEqual(vsnk.data(), ssnk.data(), 5)
    os.remove(outfile)
def test01(self): a = pmt.intern("a") b = pmt.from_double(123765) d1 = pmt.make_dict() d2 = pmt.dict_add(d1, a, b) print d2
def __init__(
        self,
        n_bursts, n_channels,
        freq_delta, base_freq, dsp_tuning,
        burst_length, base_time, hop_time,
        post_tuning=False,
        tx_gain=0,
        verbose=False
    ):
    # Hier block producing a constant-amplitude stream decorated with
    # tx_command / packet_len / tx_time tags that make a USRP hop through a
    # shuffled frequency sequence, one hop per burst.
    gr.hier_block2.__init__(
        self, "FrequencyHopperSrc",
        gr.io_signature(1, 1, gr.sizeof_gr_complex),
        gr.io_signature(1, 1, gr.sizeof_gr_complex),
    )
    n_samples_total = n_bursts * burst_length
    lowest_frequency = base_freq - numpy.floor(n_channels/2) * freq_delta
    self.hop_sequence = [lowest_frequency + n * freq_delta for n in range(n_channels)]
    numpy.random.shuffle(self.hop_sequence)
    # Repeat that:
    self.hop_sequence = [self.hop_sequence[x % n_channels] for x in range(n_bursts)]
    if verbose:
        print("Hop Frequencies  | Hop Pattern")
        print("=================|================================")
        for f in self.hop_sequence:
            print("{:6.3f} MHz |  ".format(f/1e6), end='')
            if n_channels < 50:
                print(" " * int((f - base_freq) / freq_delta) + "#")
            else:
                print("\n")
        print("=================|================================")
    # There's no real point in setting the gain via tag for this application,
    # but this is an example to show you how to do it.
    gain_tag = gr.tag_t()
    gain_tag.offset = 0
    gain_tag.key = pmt.string_to_symbol('tx_command')
    gain_tag.value = pmt.to_pmt({'gain': tx_gain})
    tag_list = [gain_tag,]
    for i in range(len(self.hop_sequence)):
        # (full seconds, fractional seconds) pair for this burst's tune time;
        # the +0.01 nudges the command slightly into the future
        time = pmt.cons(
            pmt.from_uint64(int(base_time + i * hop_time+0.01)),
            pmt.from_double((base_time + i * hop_time+0.01) % 1),
        )
        tune_tag = gr.tag_t()
        tune_tag.offset = i * burst_length
        # TODO dsp_tuning should also be able to do post_tuning
        if i > 0 and post_tuning and not dsp_tuning:
            tune_tag.offset -= 1  # Move it to last sample of previous burst
        if dsp_tuning:
            # keep the LO fixed at base_freq and hop digitally
            tune_tag.key = pmt.string_to_symbol('tx_command')
            tune_tag.value = pmt.to_pmt({'lo_freq': base_freq, 'dsp_freq': base_freq - self.hop_sequence[i]})
            tune_tag.value = pmt.dict_add(tune_tag.value, pmt.intern("time"),time)
        else:
            # retune the RF frontend for every hop
            tune_tag.key = pmt.string_to_symbol('tx_command')
            tune_tag.value = pmt.to_pmt({'freq': self.hop_sequence[i]})
            tune_tag.value = pmt.dict_add(tune_tag.value, pmt.intern('time'), time)
        tag_list.append(tune_tag)
        # burst framing for the tagged-stream path
        length_tag = gr.tag_t()
        length_tag.offset = i * burst_length
        length_tag.key = pmt.string_to_symbol('packet_len')
        length_tag.value = pmt.from_long(burst_length)
        tag_list.append(length_tag)
        # timed transmission of each burst
        time_tag = gr.tag_t()
        time_tag.offset = i * burst_length
        time_tag.key = pmt.string_to_symbol('tx_time')
        time_tag.value = pmt.make_tuple(
            pmt.car(time),
            pmt.cdr(time)
        )
        tag_list.append(time_tag)
    # carrier of the tags; multiplied with the input stream
    tag_source = blocks.vector_source_c((1.0,) * n_samples_total, repeat=False, tags=tag_list)
    mult = blocks.multiply_cc()
    self.connect(self, mult, self)
    self.connect(tag_source, (mult, 1))
def handler(self, msg):
    # Deframe a received bit PDU. Frame layout (bytes, after bit packing):
    #   [0:4)  <hh  prefix (expected 0x1337), payload length in bits
    #   [4:8)  i    CRC32 of the 4 header bytes
    #   [8:8+pktlen/8)  payload
    #   next 4 bytes    CRC32 of header+payload
    # Valid payloads are published on "pdus" with bookkeeping statistics in
    # the metadata; anything malformed is counted and dropped.
    meta = pmt.car(msg)
    bits = pmt.cdr(msg)
    self.npkt = self.npkt + 1
    # convert pmt -> int list (of bits)
    data = pmt.u8vector_elements(bits)
    ba = bitarray.bitarray(data)
    datab = ba.tobytes()
    # print map(lambda x: hex(ord(x)), datab[0:4]);
    # print 'Received Len Bits= ' + str(len(datab)*8)
    # print "Rx Packet:" + str(data[0:10]);
    # print "Rx Packet: "+":".join("{:02x}".format(ord(c)) for c in datab[0:8])
    try:
        (prefix, pktlen) = struct.unpack("<hh", datab[0:4])
        pprint.pprint({"prefix": hex(prefix), "pktlen": pktlen, "pktlen_bytes": pktlen / 8})
        if not (prefix == 0x1337):
            print "Deframer: BAD PREFIX!"
            return
        # check header crc
        c2 = binascii.crc32(datab[0:4])
        hcrc = struct.unpack("i", datab[4:8])[0]
        # print "CRC: " + str((c2,hcrc))
        if not (c2 == hcrc):
            print "Deframer: bad header crc"
            return
        self.npkt_hok = self.npkt_hok + 1
        # make sure we got enough bits for the given header len
        if len(data) < (pktlen / 8 + 8 + 4):
            print "Deframer: not enough bits received for full payload!"
            print "Deframer: pktlen field = %d, received = %d\n" % (pktlen, len(data))
            return
        if not (pktlen % 8 == 0):
            print "Deframer: payload should be a multiple of 8"
            return
        # extract header bytes and payload CRC (computed over header+payload)
        c1 = binascii.crc32(datab[0 : 8 + pktlen / 8])
        payload = datab[8 : 8 + pktlen / 8]
        # print "RX Payload len = %d"%(len(payload))
        # print ":".join("{:02x}".format(ord(c)) for c in datab)
        # print payload;
        ex_crc2 = datab[(8 + pktlen / 8) : (8 + pktlen / 8 + 4)]
    except:
        print "Not enough data to read! dropping"
        return
    try:
        c1h = struct.unpack("i", ex_crc2)[0]
        # print "rx payload CRC = %d (%s)"%(c1h, ":".join("{:02x}".format(ord(c)) for c in ex_crc2))
    except:
        print "shortened packet length dropping"
        return
    # print "CRC2:" + str((c1, c1h));
    if not c1 == c1h:
        print "Failed payload CRC"
        return
    # print "BURST OK!"
    self.npkt_ok = self.npkt_ok + 1
    pct_ok = 100.0 * self.npkt_ok / self.npkt
    pct_hok = 100.0 * self.npkt_hok / self.npkt
    print "Deframer: Percent ok = %f (%f header)%%" % (pct_ok, pct_hok)
    # send it on its way
    payload = numpy.fromstring(payload, dtype=numpy.uint8)
    v = pmt.to_pmt(payload)
    meta = pmt.dict_add(meta, pmt.intern("timestamp"), pmt.from_double(time.time()))
    meta = pmt.dict_add(meta, pmt.intern("npkt"), pmt.from_long(self.npkt))
    meta = pmt.dict_add(meta, pmt.intern("npkt_hok"), pmt.from_long(self.npkt_hok))
    meta = pmt.dict_add(meta, pmt.intern("npkt_ok"), pmt.from_long(self.npkt_ok))
    meta = pmt.dict_add(meta, pmt.intern("header_pass_rate"), pmt.from_double(pct_hok))
    meta = pmt.dict_add(meta, pmt.intern("payload_pass_rate"), pmt.from_double(pct_ok))
    pdu = pmt.cons(meta, v)
    self.message_port_pub(pmt.intern("pdus"), pdu)
def make_header(options, filename):
    """Write the .hdr companion file for a raw capture.

    Builds one serialized PMT header dict per seg_size-sample segment
    (version, size, type, cplx, rx_time, rx_rate, bytes, strt), advancing
    rx_time between segments, with a final short segment covering the
    remainder. If options.freq is given, an extras dict with "rx_freq" is
    appended after every header.
    """
    extras_present = False
    if options.freq is not None:
        extras_present = True
    # Open the file and make the header
    hdr_filename = filename + '.hdr'
    hdr_file = open(hdr_filename, 'wb')
    header = pmt.make_dict()
    # Fill in header vals
    # TODO - Read this from blocks.METADATA_VERSION
    ver_val = pmt.from_long(long(0))
    rate_val = pmt.from_double(options.sample_rate)
    time_val = pmt.make_tuple(pmt.from_uint64(options.time_sec),
                              pmt.from_double(options.time_fsec))
    ft_to_sz = parse_file_metadata.ftype_to_size
    # Map shortname to properties
    enum_type = SNAME_TO_ENUM[options.format]
    type_props = SNAME_DEFS[enum_type]
    size_val = pmt.from_long(type_props[0])
    cplx_val = pmt.from_bool(type_props[1])
    type_val = pmt.from_long(type_props[2])
    fmt = type_props[2]
    file_samp_len = long(options.length)
    seg_size = long(options.seg_size)
    bytes_val = pmt.from_uint64(long(seg_size*ft_to_sz[fmt]))
    # Set header vals
    header = pmt.dict_add(header, pmt.intern("version"), ver_val)
    header = pmt.dict_add(header, pmt.intern("size"), size_val)
    header = pmt.dict_add(header, pmt.intern("type"), type_val)
    header = pmt.dict_add(header, pmt.intern("cplx"), cplx_val)
    header = pmt.dict_add(header, pmt.intern("rx_time"), time_val)
    header = pmt.dict_add(header, pmt.intern("rx_rate"), rate_val)
    header = pmt.dict_add(header, pmt.intern("bytes"), bytes_val)
    if extras_present:
        freq_key = pmt.intern("rx_freq")
        freq_val = pmt.from_double(options.freq)
        extras = pmt.make_dict()
        extras = pmt.dict_add(extras, freq_key, freq_val)
        extras_str = pmt.serialize_str(extras)
        # "strt": byte offset where the data starts (after header + extras)
        start_val = pmt.from_uint64(blocks.METADATA_HEADER_SIZE + len(extras_str))
    else:
        start_val = pmt.from_uint64(blocks.METADATA_HEADER_SIZE)
    header = pmt.dict_add(header, pmt.intern("strt"), start_val)
    num_segments = file_samp_len/seg_size
    if options.verbose:
        print "Wrote %d headers to: %s (Version %d)" % (num_segments+1,
                                                        hdr_filename,pmt.to_long(ver_val))
    for x in range(0,num_segments,1):
        # Serialize and write out file
        if extras_present:
            header_str = pmt.serialize_str(header) + extras_str
        else:
            header_str = pmt.serialize_str(header)
        hdr_file.write(header_str)
        # Update header based on sample rate and segment size
        header = update_timestamp(header,seg_size)
    # Last header is special b/c file size is probably not mult. of seg_size
    header = pmt.dict_delete(header,pmt.intern("bytes"))
    bytes_remaining = ft_to_sz[fmt]*(file_samp_len - num_segments*long(seg_size))
    bytes_val = pmt.from_uint64(bytes_remaining)
    header = pmt.dict_add(header,pmt.intern("bytes"),bytes_val)
    # Serialize and write out file
    if extras_present:
        header_str = pmt.serialize_str(header) + extras_str
    else:
        header_str = pmt.serialize_str(header)
    hdr_file.write(header_str)
    hdr_file.close()