def test_002_update(self):
    """add_usrp_tags_c emits the initial freq/rate tags, then update_tags()
    changes the values seen downstream."""
    start_time = 0.1
    self.duration = 125000
    self.src = blocks.vector_source_c(list(range(self.duration)), False, 1, [])
    self.throttle = blocks.throttle(gr.sizeof_gr_complex * 1, 250000)
    self.utag = timing_utils.add_usrp_tags_c(1090e6, 250000, 0, start_time)
    self.tag_dbg = blocks.tag_debug(gr.sizeof_gr_complex * 1, '', "")
    self.tb.connect((self.src, 0), (self.throttle, 0))
    self.tb.connect((self.throttle, 0), (self.utag, 0))
    self.tb.connect((self.utag, 0), (self.tag_dbg, 0))
    self.tb.start()
    time.sleep(.01)

    def check_tags(expected_freq, expected_rate):
        # Inspect whatever tags have propagated so far.
        for t in self.tag_dbg.current_tags():
            if pmt.eq(t.key, pmt.intern("rx_freq")):
                self.assertAlmostEqual(expected_freq, pmt.to_double(t.value))
            if pmt.eq(t.key, pmt.intern("rx_rate")):
                self.assertAlmostEqual(expected_rate, pmt.to_double(t.value))

    check_tags(1090e6, 250000)
    self.utag.update_tags(self.makeDict(freq=1091e6, rate=260000,
                                        epoch_int=0,
                                        epoch_frac=start_time + .3))
    time.sleep(.01)
    check_tags(1091e6, 260000)
    time.sleep(.1)
    self.tb.stop()
def test_tag_propagation(self):
    """Interleaved stream data and per-source tag offsets are as expected."""
    N = 10  # Block length
    stream_sizes = [1, 2, 3]
    block_len = sum(stream_sizes)
    expected_result = N * (stream_sizes[0] * [1, ]
                           + stream_sizes[1] * [2, ]
                           + stream_sizes[2] * [3, ])
    # check the data
    (result, tags) = self.help_stream_tag_propagation(N, stream_sizes)
    self.assertFloatTuplesAlmostEqual(expected_result, result, places=6)
    # check the tags: each source's tags start at its offset within a block
    # and repeat every block_len items
    starts = [0, stream_sizes[0], stream_sizes[0] + stream_sizes[1]]
    for src_name, start in zip(('src1', 'src2', 'src3'), starts):
        expected_offsets = [start + block_len * i for i in range(N)]
        matching = [tag for tag in tags
                    if pmt.eq(tag.key, pmt.intern(src_name))]
        for i, offset in enumerate(expected_offsets):
            self.assertTrue(offset == matching[i].offset)
def msg_handler_analyzed_data_in(self, msg):
    """Handle an analyzed gate message.

    An X gate toggles the stored readout angle of the addressed qubit;
    an RO gate publishes a simulated-readout START message.

    msg: PMT vector whose element 0 is a dict of gate parameters.
    """
    print("in msg_handler_analyzed_data_in")
    self.lock()
    # BUG FIX: the early returns below previously skipped unlock(),
    # leaving the flowgraph locked forever; release in a finally block.
    try:
        gate_params = pmt.vector_ref(msg, 0)
        gate_type_PMT = pmt.dict_ref(
            gate_params,
            pmt.from_float(
                quantum_gate_param_type.quantum_gate_param_type.GATE_TYPE),
            pmt.PMT_NIL)
        if pmt.eq(gate_type_PMT, pmt.PMT_NIL):
            return
        gate_type = pmt.to_float(gate_type_PMT)
        print("gate_params.gate_type=" + str(gate_type))
        qubit_id_PMT = pmt.dict_ref(
            gate_params,
            pmt.from_float(
                quantum_gate_param_type.quantum_gate_param_type.QUBIT_ID),
            pmt.PMT_NIL)
        if pmt.eq(qubit_id_PMT, pmt.PMT_NIL):
            return
        qubit_id = pmt.to_float(qubit_id_PMT)
        print("gate_params.qubit_id=" + str(qubit_id))
        if gate_type == quantum_gate_type.quantum_gate_type.X:
            print("in msg_handler_analyzed_data_in X gate")
            # Build the circuit: an X gate flips the readout angle 0 <-> 180.
            RO_STATE = self._qubit_stat_map[qubit_id]
            if float(RO_STATE.angle) == 0.0:
                RO_STATE.angle = 180.0
            else:
                RO_STATE.angle = 0.0
            self._qubit_stat_map[qubit_id] = RO_STATE
        elif gate_type == quantum_gate_type.quantum_gate_type.RO:
            print("in msg_handler_analyzed_data_in RO")
            # Run the circuit: publish a simulated-readout START message.
            RO_STATE = self._qubit_stat_map[qubit_id]
            SIM_msg = pmt.make_dict()
            SIM_msg = pmt.dict_add(
                SIM_msg,
                pmt.from_float(
                    quantum_qubit_param_type.quantum_qubit_param_type.ID),
                pmt.from_float(qubit_id))
            SIM_msg = pmt.dict_add(
                SIM_msg,
                pmt.from_float(
                    quantum_qubit_param_type.quantum_qubit_param_type.ANGLE),
                pmt.from_float(float(RO_STATE.angle)))
            SIM_msg = pmt.dict_add(
                SIM_msg,
                pmt.from_float(
                    quantum_qubit_param_type.quantum_qubit_param_type.STATE),
                pmt.from_float(quantum_qubit_RO_state_type.
                               quantum_qubit_RO_state_type.START))
            self.message_port_pub(pmt.intern('simulated_data'), SIM_msg)
            RO_STATE.state = quantum_qubit_RO_state_type.quantum_qubit_RO_state_type.START
            self._qubit_stat_map[qubit_id] = RO_STATE
    finally:
        self.unlock()
def msg_handler_QOBJ_in(self, msg):
    """Render a qubit-state message on the Bloch-sphere view.

    msg: PMT dict carrying ID, ANGLE and POLE qubit parameters. Messages
    for a different qubit than self._qubit_id are ignored.
    """
    print("in msg_handler_QOBJ_in")
    self.lock()
    # BUG FIX: the original never called unlock() (and returned early in
    # several places), leaving the flowgraph locked; release in finally.
    try:
        qubit_id_PMT = pmt.dict_ref(
            msg,
            pmt.from_float(
                quantum_qubit_param_type.quantum_qubit_param_type.ID),
            pmt.PMT_NIL)
        if pmt.eq(qubit_id_PMT, pmt.PMT_NIL):
            return
        qubit_id = pmt.to_float(qubit_id_PMT)
        print("quantum_qubit_param_type.qubit_id=" + str(qubit_id))
        # BUG FIX: was `this._qubit_id` (a NameError); Python uses self.
        if self._qubit_id != qubit_id:
            return
        qubit_angle_PMT = pmt.dict_ref(
            msg,
            pmt.from_float(
                quantum_qubit_param_type.quantum_qubit_param_type.ANGLE),
            pmt.PMT_NIL)
        if pmt.eq(qubit_angle_PMT, pmt.PMT_NIL):
            return
        qubit_angle = pmt.to_float(qubit_angle_PMT)
        print("quantum_qubit_param_type.qubit_angle=" + str(qubit_angle))
        qubit_pole_PMT = pmt.dict_ref(
            msg,
            pmt.from_float(
                quantum_qubit_param_type.quantum_qubit_param_type.POLE),
            pmt.PMT_NIL)
        if pmt.eq(qubit_pole_PMT, pmt.PMT_NIL):
            return
        qubit_pole = pmt.to_float(qubit_pole_PMT)
        print("quantum_qubit_param_type.qubit_pole=" + str(qubit_pole))
        e_ops = [sigmax(), sigmay(), sigmaz()]
        if qubit_pole == 1.0:
            Z = sigmaz()
        else:
            Z = -sigmaz()
        if qubit_angle == 0.0:
            Q = Z
        elif qubit_angle > 0.0:
            Q = sigmax() * self.angle_converter(qubit_angle)
            Q = Q + Z
        else:
            # BUG FIX: this branch was guarded by `elif ():`, which is
            # always false (empty tuple), so negative angles left Q unbound
            # and crashed at Q.unit() below.
            Q = sigmax() * self.angle_converter(-qubit_angle)
            Q = Q + (-Z)
        # self._block_view.clear()
        self._block_view.add_vectors(expect(Q.unit(), e_ops))
        self._block_view.make_sphere()
        self._block_view.show()
    finally:
        self.unlock()
def test_tag_propagation(self):
    """Verify data and tag offsets on all three demuxed output streams."""
    N = 10  # Block length
    stream_sizes = [1, 2, 3]
    s0, s1, s2 = stream_sizes
    expected_result0 = N * (s0 * [1, ])
    expected_result1 = N * (s1 * [2, ])
    expected_result2 = N * (s2 * [3, ])
    # check the data
    (result0, result1, result2) = self.help_stream_tag_propagation(N, stream_sizes)
    self.assertFloatTuplesAlmostEqual(expected_result0, result0.data(), places=6)
    self.assertFloatTuplesAlmostEqual(expected_result1, result1.data(), places=6)
    self.assertFloatTuplesAlmostEqual(expected_result2, result2.data(), places=6)

    def check_offsets(tags, expectations):
        # expectations maps a tag-key name to its expected offsets, in order.
        for name, expected in expectations.items():
            matching = [tag for tag in tags
                        if pmt.eq(tag.key, pmt.intern(name))]
            for i, offset in enumerate(expected):
                self.assertTrue(offset == matching[i].offset)

    # check the tags - result0
    check_offsets(result0.tags(), {
        'src1': range(0, s0 * N, s0),
        'src2': range(0, s0 * N, s0),
        'src3': range(0, s0 * N, s0),
    })
    # check the tags - result1
    check_offsets(result1.tags(), {
        'src1': range(0, s1 * N, s0),
        'src2': range(1, s1 * N, s1),
        'src3': [],
    })
    # check the tags - result2
    check_offsets(result2.tags(), {
        'src1': range(0, s2 * N, s0),
        'src2': range(1, s2 * N, s2),
        'src3': range(0, s2 * N, s2),
    })
def tag_handler(self, ninput_items):
    """Scan input tags for correlator-start and frequency-estimate keys.

    corr_start latches the sample delay (offset modulo sps) and sets the
    detection flag; freq_est stores the negated frequency offset.
    """
    nread = self.nitems_read(0)  # number of items read on port 0
    corr_key = pmt.intern("corr_start")
    freq_key = pmt.intern("freq_est")
    for tag in self.get_tags_in_range(0, nread, nread + ninput_items):
        if pmt.eq(corr_key, tag.key):
            self.delay = tag.offset % self.sps
            self.corr_start_detected = True
        if pmt.eq(freq_key, tag.key):
            self.freq_off = -float(pmt.to_double(tag.value))
def test_003_every(self):
    """tag_uhd_offset_c emits a consistent rx_time_offset tag at every interval."""
    self.tb = gr.top_block()
    self.rate = 99999999999
    self.interval = 1
    self.duration = 4321
    self.src = blocks.vector_source_c(list(range(self.duration)), False, 1, [])
    self.utag = timing_utils.add_usrp_tags_c(1090e6, self.rate, 0, .98765)
    self.tags = timing_utils.tag_uhd_offset_c(self.rate, self.interval)
    self.tag_dbg = blocks.tag_debug(gr.sizeof_gr_complex * 1, "", "")
    self.tag_dbg.set_display(False)
    self.tb.connect((self.src, 0), (self.utag, 0))
    self.tb.connect((self.utag, 0), (self.tags, 0))
    self.tb.connect((self.tags, 0), (self.tag_dbg, 0))
    e_n_tags = int(ceil(1.0 * self.duration / self.interval)) + 3
    self.tb.run()
    tprev = None
    for t in self.tag_dbg.current_tags():
        if not pmt.eq(t.key, pmt.intern("rx_time_offset")):
            continue
        # tuple layout: (int secs, frac secs, sample offset, rate)
        self.assertAlmostEqual(self.rate,
                               pmt.to_double(pmt.tuple_ref(t.value, 3)), -4)
        self.assertEqual(t.offset, pmt.to_uint64(pmt.tuple_ref(t.value, 2)))
        self.assertTrue((pmt.to_uint64(pmt.tuple_ref(t.value, 2))
                         / (1.0 * self.interval)).is_integer())
        tcur = (pmt.to_uint64(pmt.tuple_ref(t.value, 0))
                + pmt.to_double(pmt.tuple_ref(t.value, 1)))
        if tprev is not None:
            # consecutive tags are exactly one interval of samples apart
            self.assertAlmostEqual(tcur - tprev, 1.0 * self.interval / self.rate)
        tprev = tcur
    self.assertEqual(self.tag_dbg.num_tags(), e_n_tags)
    self.tb = None
def tag_propagation(self, ninput_items):
    """Manually re-emit packet_len/pdu_len tags at offsets scaled by 1/factor.

    Automatic propagation is disabled (policy 0) because this block changes
    the item rate; matching tags are re-added at the scaled offset.
    """
    self.set_tag_propagation_policy(0)
    nread = self.nitems_read(0)  # number of items read on port 0
    keys = (pmt.intern("packet_len"), pmt.intern("pdu_len"))
    for tag in self.get_tags_in_range(0, nread, nread + ninput_items):
        for key in keys:
            if pmt.eq(key, tag.key):
                scaled_offset = int(tag.offset / self.factor)
                value = pmt.from_long(int(pmt.to_long(tag.value)))
                self.add_item_tag(0, scaled_offset, key, value)
def test_002_tags(self):
    """system_time_diff_c passes a wall_clock_time tag through intact."""
    start_time = 0.1
    self.duration = 125000
    tnow = time.time()
    src_tag = gr.tag_utils.python_to_tag([0, pmt.intern("wall_clock_time"),
                                          pmt.from_double(tnow - 10000),
                                          pmt.intern("test_002_tags")])
    self.src = blocks.vector_source_c(list(range(self.duration)), False, 1,
                                      [src_tag])
    self.throttle = blocks.throttle(gr.sizeof_gr_complex * 1, 250000)
    self.dut = timing_utils.system_time_diff_c(True, True, False)
    self.tag_dbg = blocks.tag_debug(gr.sizeof_gr_complex * 1, '', "")
    self.tb.connect((self.src, 0), (self.throttle, 0))
    self.tb.connect((self.throttle, 0), (self.dut, 0))
    self.tb.connect((self.dut, 0), (self.tag_dbg, 0))
    self.tb.start()
    time.sleep(.01)
    tags = self.tag_dbg.current_tags()
    # BUG FIX: time_tag was never initialized, so if the tag did not arrive
    # the test raised NameError instead of failing the assertion below.
    time_tag = None
    print("Dumping tags")
    for t in tags:
        print('Tag:', t.key, ' ', t.value)
        if pmt.eq(t.key, pmt.intern("wall_clock_time")):
            time_tag = t
    if time_tag:
        self.assertAlmostEqual(tnow, pmt.to_double(time_tag.value), delta=60)
    else:
        self.assertTrue(False)
    time.sleep(.1)
    self.tb.stop()
def test_002_1tag(self):
    '''
    Tests a stream with a single tag
    '''
    src_tag = gr.tag_utils.python_to_tag([0, pmt.intern("sam"),
                                          pmt.from_double(10000),
                                          pmt.intern("test_002_1tag")])
    samples = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
    source = blocks.vector_source_i(samples, False, 1, [src_tag])
    debugger = sandia_utils.sandia_tag_debug(gr.sizeof_int, "tag QA")
    self.tb.connect(source, debugger)
    self.tb.run()
    # exactly one tag should arrive, with key and value preserved
    self.assertEqual(1, debugger.num_tags())
    first = debugger.get_tag(0)
    self.assertTrue(pmt.eq(first.key, pmt.intern("sam")))
    self.assertAlmostEqual(10000, pmt.to_double(first.value))
def tag_handler(self, n_input_items):
    """Reset the PN9 whitening register whenever a packet_len tag arrives."""
    n_read = self.nitems_read(0)  # number of items read on port 0
    packet_len_key = pmt.intern("packet_len")
    for tag in self.get_tags_in_range(0, n_read, n_read + n_input_items):
        if pmt.eq(packet_len_key, tag.key):
            # new packet: re-seed the 9-bit PN9 state to all ones
            self.PN9 = np.ones((9, ), dtype=int)
def test_003_tags(self):
    '''
    Tests a stream that has multiple tags inside it
    '''
    expected = [("sam", 10000), ("peter", 1000), ("jacob", 100)]
    src_tags = [gr.tag_utils.python_to_tag([offset, pmt.intern(name),
                                            pmt.from_double(value),
                                            pmt.intern("test_003_tags")])
                for offset, (name, value) in enumerate(expected)]
    src_data = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
    src = blocks.vector_source_i(src_data, False, 1, src_tags)
    dut = sandia_utils.sandia_tag_debug(gr.sizeof_int, "tag QA")
    self.tb.connect(src, dut)
    self.tb.run()
    self.assertEqual(3, dut.num_tags())
    # each tag comes back in order with key and value intact
    for idx, (name, value) in enumerate(expected):
        tag = dut.get_tag(idx)
        self.assertTrue(pmt.eq(tag.key, pmt.intern(name)))
        self.assertAlmostEqual(value, pmt.to_double(tag.value))
def test_003_snr(self):
    """Channel-estimator SNR tags should track the injected noise levels.

    Builds one clean preamble frame followed by frames with increasing
    noise, then checks each emitted snr_lin tag against the expected SNR
    to within 1 dB (the clean frame's infinite SNR is skipped).
    """
    nframes = 30
    timeslots = 5
    subcarriers = 1024
    active_subcarriers = 936
    overlap = 2
    cp_len = subcarriers // 2
    ramp_len = cp_len // 2
    active_ratio = subcarriers / active_subcarriers
    subcarrier_map = get_subcarrier_map(subcarriers, active_subcarriers,
                                        dc_free=True)
    preambles = mapped_preamble(self.seed, self.filtertype, self.filteralpha,
                                active_subcarriers, subcarriers,
                                subcarrier_map, overlap, cp_len, ramp_len)
    core_preamble = preambles[1]
    sigenergy = calculate_energy(core_preamble)
    data = np.copy(core_preamble)
    # BUG FIX: np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # it was a plain alias for the builtin float, which is used instead.
    snrs = np.arange(3, 3 * nframes, 3, dtype=float)
    snrs_lin = 10. ** (snrs / 10.)
    expected_snrs_lin = np.concatenate(((np.inf,), snrs_lin))
    for i, snr_lin in enumerate(snrs_lin):
        nscale = calculate_noise_scale(snr_lin, sigenergy, active_ratio,
                                       core_preamble.size)
        noise = get_noise_vector(core_preamble.size, nscale)
        d = core_preamble + noise
        data = np.concatenate((data, d))
    dut = gfdm.channel_estimator_cc(timeslots, subcarriers,
                                    active_subcarriers, True, 1,
                                    core_preamble)
    src = blocks.vector_source_c(data)
    snk = blocks.vector_sink_c()
    self.tb.connect(src, dut, snk)
    self.tb.run()
    res = np.array(snk.data())
    self.assertEqual(res.size, nframes * timeslots * subcarriers)
    tags = snk.tags()
    snr_tags = [t for t in tags if pmt.eq(t.key, pmt.mp("snr_lin"))]
    for i, t in enumerate(snr_tags):
        self.assertEqual(t.offset, i * timeslots * subcarriers)
        res_lin = pmt.to_float(t.value)
        res_db = 10. * np.log10(res_lin)
        ref_db = 10. * np.log10(expected_snrs_lin[i])
        # print(f"Reference: {ref_db:6.3f}dB\t{res_db:6.3f}dB")
        if np.isfinite(ref_db):
            self.assertTrue(np.abs(res_db - ref_db) < 1.)
def wait_for_tag(self, in0):
    """Latch the offset of a corr_start tag.

    self.num is set to 1 while tags are present in the window, 0 otherwise.
    NOTE(review): assumes the else-branch pairs with the `if tags:` guard
    (as in the sibling wait_tags handler) — confirm against the original.
    """
    nread = self.nitems_read(0)  # number of items read on port 0
    tags = self.get_tags_in_range(0, nread, nread + self.n_input_items)
    key = pmt.intern("corr_start")
    if tags:
        for tag in tags:
            if pmt.eq(key, tag.key):
                self.offset = tag.offset
                self.num = 1
    else:
        self.num = 0
def extract_pdu_header(header):
    """Extract routing fields from a PDU header dict.

    Returns (dest_id, src_id, frame_num, checksum) where checksum is an
    ndarray when the header carries a u8vector, True when it is PMT_T,
    and False otherwise.
    """
    def _long_field(name):
        # integer header fields are stored as pmt longs
        return pmt.to_long(pmt.dict_ref(header, pmt.intern(name), pmt.PMT_NIL))

    dest = _long_field('dest_id')
    src = _long_field('src_id')
    frame = _long_field('frame_num')
    checksum = pmt.dict_ref(header, pmt.intern('checksum'), pmt.PMT_NIL)
    if pmt.is_u8vector(checksum):
        checksum = pmt_u8vector_to_ndarray(checksum)
    elif pmt.eq(checksum, pmt.PMT_T):
        checksum = True
    else:
        checksum = False
    return dest, src, frame, checksum
def recalc_msg(self, msg):
    """Set the module-level `start` flag from a ('recalc', int) message pair.

    Value 10 clears the flag; value 20 sets it.
    """
    # BUG FIX: `global start` was declared twice, the second time after an
    # assignment to `start`, which is a SyntaxError in Python 3
    # ("name 'start' is assigned to before global declaration").
    # Declare it once, up front.
    global start
    if pmt.is_pair(msg):
        key = pmt.car(msg)
        val = pmt.cdr(msg)
        if pmt.eq(key, pmt.intern("recalc")):
            if pmt.is_integer(val):
                if pmt.to_long(val) == 10:
                    start = 0
                if pmt.to_long(val) == 20:
                    start = 1
def recalc_msg(self, msg):
    """Set the module-level `file_written` flag from a ('recalc', int) pair.

    Value 10 sets the flag; value 20 clears it.
    """
    # BUG FIX: `global file_written` was declared twice, the second time
    # after an assignment, which is a SyntaxError in Python 3; declare once.
    global file_written
    if pmt.is_pair(msg):
        key = pmt.car(msg)
        val = pmt.cdr(msg)
        if pmt.eq(key, pmt.intern("recalc")):
            if pmt.is_integer(val):
                if pmt.to_long(val) == 10:
                    file_written = 1
                if pmt.to_long(val) == 20:
                    file_written = 0
                # BUG FIX: Python 2 print statement -> Python 3 function call
                print("hello")
def handle_msg(self, msg):
    """Republish an incoming u8vector PDU and report whether it changed.

    msg: PDU pair whose cdr is a u8vector payload.
    """
    msgs = pmt.cdr(msg)
    msg_str = "".join([chr(x) for x in pmt.u8vector_elements(msgs)])
    # BUG FIX: Python 2 print statements -> Python 3 function calls.
    print(msg_str)
    print(self.message)
    # BUG FIX: the original assigned self.message = msg *before* comparing,
    # so pmt.eq(self.message, msg) was always true and the "Changed" branch
    # was unreachable. Compare first, then update.
    if pmt.eq(self.message, msg):
        print("No change")
    else:
        print("Changed")
        self.message = msg
    self.message_port_pub(pmt.intern('message_stream out'), self.message)
def set_signal_number_msg(self, msg):
    """Update self.n from a ('signal_number_msg', int) message pair.

    Malformed messages are reported on stdout and ignored.
    """
    if not pmt.is_pair(msg):
        print("DoA Esprit: Not a tuple")
        return
    key = pmt.car(msg)
    val = pmt.cdr(msg)
    if not pmt.eq(key, pmt.string_to_symbol("signal_number_msg")):
        print("DoA Esprit: Key not 'signal_number_msg'")
        return
    if pmt.is_integer(val):
        self.n = pmt.to_long(val)
    else:
        print("DoA Esprit: Not an integer")
def tag_handler(self, ninput_items):
    """Move phr_start tags to symbol-rate offsets and record sample delay.

    Automatic propagation is disabled (policy 0); matching tags are
    re-added at offset // sps, with the sub-symbol remainder kept in
    self.delay.
    """
    self.set_tag_propagation_policy(0)
    nread = self.nitems_read(0)  # number of items read on port 0
    phr_key = pmt.intern("phr_start")
    for tag in self.get_tags_in_range(0, nread, nread + ninput_items):
        if pmt.eq(phr_key, tag.key):
            raw_offset = tag.offset
            self.delay = raw_offset % self.sps
            symbol_offset = int(raw_offset / self.sps)
            value = pmt.from_double(int(pmt.to_double(tag.value)))
            self.add_item_tag(0, symbol_offset, phr_key, value)
def tag_handler(self, ninput_items):
    """Re-tag corr_start/phr_start and capture frequency offset and delay.

    Automatic propagation is disabled (policy 0). corr_start is re-added
    at its original offset, phr_start at the symbol-rate offset; freq_est
    and SFD_start update internal state only.
    """
    self.set_tag_propagation_policy(0)
    nread = self.nitems_read(0)  # number of items read on port 0
    corr_key = pmt.intern("corr_start")
    phr_key = pmt.intern("phr_start")
    freq_key = pmt.intern("freq_est")
    sfd_key = pmt.intern("SFD_start")
    for tag in self.get_tags_in_range(0, nread, nread + ninput_items):
        if pmt.eq(corr_key, tag.key):
            value = pmt.from_double(float(pmt.to_double(tag.value)))
            self.add_item_tag(0, tag.offset, corr_key, value)
        if pmt.eq(phr_key, tag.key):
            value = pmt.from_double(float(pmt.to_double(tag.value)))
            self.add_item_tag(0, int(tag.offset / self.sps), phr_key, value)
        if pmt.eq(freq_key, tag.key):
            self.freq_off = 0  # -float(pmt.to_double(tag.value))
        if pmt.eq(sfd_key, tag.key):
            self.delay = tag.offset % self.sps
def wait_tags(self, in0, out):
    """Find the next pdu_len tag, re-emit it in bits, and consume up to it.

    self.num flags whether any tags were present in the window.
    """
    n_read = self.nitems_read(0)  # Number of items read on port 0
    tags = self.get_tags_in_range(0, n_read, n_read + self.n_input_items)
    key = pmt.intern("pdu_len")
    if tags:
        for tag in tags:
            if pmt.eq(key, tag.key):
                self.offset_rel = tag.offset - n_read
                self.pdu_len = pmt.to_long(tag.value) * 8  # bytes -> bits
                self.add_item_tag(0, self.offset_tag, key,
                                  pmt.from_long(self.pdu_len))
                self.offset_tag = self.offset_tag + self.pdu_len
                self.num = 1
                break
        self.consume(0, self.offset_rel)
    else:
        self.num = 0
def handle_msg(self, msg_pmt):
    """Route a frame to the SNET-A..D output port selected by its SrcId.

    Frames with no 'SNET SrcId' metadata or an out-of-range id are dropped.
    """
    src_id = pmt.dict_ref(pmt.car(msg_pmt), pmt.intern('SNET SrcId'),
                          pmt.PMT_NIL)
    if pmt.eq(src_id, pmt.PMT_NIL):
        return
    # top bits of the id select the satellite
    sat_index = pmt.to_long(src_id) >> 1
    port_names = {0: 'SNET-A', 1: 'SNET-B', 2: 'SNET-C', 3: 'SNET-D'}
    if sat_index not in port_names:
        return
    self.message_port_pub(pmt.intern(port_names[sat_index]), msg_pmt)
def test_001(self):
    """Round-trip metadata through file_meta_sink/file_meta_source and check
    that the samp_rate/rx_rate headers, tags, and data payload survive."""
    N = 1000
    outfile = "test_out.dat"
    detached = False
    samp_rate = 200000
    key = pmt.intern("samp_rate")
    val = pmt.from_double(samp_rate)
    extras = pmt.make_dict()
    extras = pmt.dict_add(extras, key, val)
    data = sig_source_c(samp_rate, 1000, 1, N)
    src = blocks.vector_source_c(data)
    fsnk = blocks.file_meta_sink(gr.sizeof_gr_complex, outfile,
                                 samp_rate, 1,
                                 blocks.GR_FILE_FLOAT, True,
                                 1000000, extras, detached)
    fsnk.set_unbuffered(True)
    self.tb.connect(src, fsnk)
    self.tb.run()
    fsnk.close()
    handle = open(outfile, "rb")
    header_str = handle.read(blocks.parse_file_metadata.HEADER_LENGTH)
    # BUG FIX: assertFalse() requires an expression argument; the original
    # bare calls raised TypeError instead of failing the test. Use fail().
    if len(header_str) == 0:
        self.fail("header string is empty")
    try:
        header = pmt.deserialize_str(header_str)
    except RuntimeError:
        self.fail("could not deserialize header")
    info = blocks.parse_header(header, False)
    extra_str = handle.read(info["extra_len"])
    self.assertEqual(len(extra_str) > 0, True)
    handle.close()
    try:
        extra = pmt.deserialize_str(extra_str)
    except RuntimeError:
        self.fail("could not deserialize extras")
    extra_info = blocks.parse_extra_dict(extra, info, False)
    self.assertEqual(info['rx_rate'], samp_rate)
    self.assertEqual(pmt.to_double(extra_info['samp_rate']), samp_rate)

    # Test file metadata source
    src.rewind()
    fsrc = blocks.file_meta_source(outfile, False)
    vsnk = blocks.vector_sink_c()
    tsnk = blocks.tag_debug(gr.sizeof_gr_complex, "QA")
    ssnk = blocks.vector_sink_c()
    self.tb.disconnect(src, fsnk)
    self.tb.connect(fsrc, vsnk)
    self.tb.connect(fsrc, tsnk)
    self.tb.connect(src, ssnk)
    self.tb.run()
    fsrc.close()

    # Test to make sure tags with 'samp_rate' and 'rx_rate' keys
    # were generated and received correctly.
    tags = tsnk.current_tags()
    for t in tags:
        if pmt.eq(t.key, pmt.intern("samp_rate")):
            self.assertEqual(pmt.to_double(t.value), samp_rate)
        elif pmt.eq(t.key, pmt.intern("rx_rate")):
            self.assertEqual(pmt.to_double(t.value), samp_rate)

    # Test that the data portion was extracted and received correctly.
    self.assertComplexTuplesAlmostEqual(vsnk.data(), ssnk.data(), 5)
    os.remove(outfile)
def msg_handler_analyzed_data_in_circuit(self, msg):
    """Append the analyzed gate to the qubit circuit; an RO gate also starts
    a simulated readout.

    msg: PMT vector whose element 0 is a dict of gate parameters.
    """
    print("in msg_handler_analyzed_data_in")
    self.lock()
    # BUG FIX: the early returns below previously skipped unlock(),
    # leaving the flowgraph locked forever; release in a finally block.
    try:
        gate_params = pmt.vector_ref(msg, 0)
        gate_type_PMT = pmt.dict_ref(
            gate_params,
            pmt.from_float(
                quantum_gate_param_type.quantum_gate_param_type.GATE_TYPE),
            pmt.PMT_NIL)
        if pmt.eq(gate_type_PMT, pmt.PMT_NIL):
            return
        gate_type = pmt.to_float(gate_type_PMT)
        print("gate_params.gate_type=" + str(gate_type))
        qubit_id_PMT = pmt.dict_ref(
            gate_params,
            pmt.from_float(
                quantum_gate_param_type.quantum_gate_param_type.QUBIT_ID),
            pmt.PMT_NIL)
        if pmt.eq(qubit_id_PMT, pmt.PMT_NIL):
            return
        qubit_id = pmt.to_float(qubit_id_PMT)
        print("gate_params.qubit_id=" + str(qubit_id))
        # BUG FIX: every branch used `this._qcircuit_index_cnt` (a Java-ism
        # that raises NameError in Python); instance access is via self.
        if gate_type == quantum_gate_type.quantum_gate_type.X:
            print("in msg_handler_analyzed_data_in X gate")
            self._qubit_circuit.add_gate("RX", targets=qubit_id, arg_value=pi,
                                         index=self._qcircuit_index_cnt)
        elif gate_type == quantum_gate_type.quantum_gate_type.Y:
            print("in msg_handler_analyzed_data_in Y gate")
            self._qubit_circuit.add_gate("RY", targets=qubit_id, arg_value=pi,
                                         index=self._qcircuit_index_cnt)
        elif gate_type == quantum_gate_type.quantum_gate_type.Z:
            print("in msg_handler_analyzed_data_in Z gate")
            self._qubit_circuit.add_gate("RZ", targets=qubit_id, arg_value=pi,
                                         index=self._qcircuit_index_cnt)
        elif gate_type == quantum_gate_type.quantum_gate_type.H:
            print("in msg_handler_analyzed_data_in H gate")
            self._qubit_circuit.add_gate("hadamard_transform",
                                         targets=qubit_id,
                                         index=self._qcircuit_index_cnt)
        elif gate_type == quantum_gate_type.quantum_gate_type.S:
            print("in msg_handler_analyzed_data_in S gate")
            # S == RZ(pi/2)
            self._qubit_circuit.add_gate("RZ", targets=qubit_id,
                                         arg_value=pi / 2,
                                         index=self._qcircuit_index_cnt)
        elif gate_type == quantum_gate_type.quantum_gate_type.T:
            print("in msg_handler_analyzed_data_in T gate")
            # T == RZ(pi/4)
            self._qubit_circuit.add_gate("RZ", targets=qubit_id,
                                         arg_value=pi / 4,
                                         index=self._qcircuit_index_cnt)
        elif gate_type == quantum_gate_type.quantum_gate_type.INIT:
            print("in msg_handler_analyzed_data_in INIT gate")
            pass
        elif gate_type == quantum_gate_type.quantum_gate_type.CNOT:
            print("in msg_handler_analyzed_data_in CNOT gate")
            self._qubit_circuit.add_gate("CNOT", controls=[0], targets=[1])
        elif gate_type == quantum_gate_type.quantum_gate_type.JUNC:
            print("in msg_handler_analyzed_data_in JUNC gate")
            pass
        elif gate_type == quantum_gate_type.quantum_gate_type.RO:
            print("in msg_handler_analyzed_data_in RO")
            # Run the circuit: publish a simulated-readout START message.
            RO_STATE = self._qubit_stat_map[qubit_id]
            SIM_msg = pmt.make_dict()
            SIM_msg = pmt.dict_add(
                SIM_msg,
                pmt.from_float(
                    quantum_qubit_param_type.quantum_qubit_param_type.ID),
                pmt.from_float(qubit_id))
            SIM_msg = pmt.dict_add(
                SIM_msg,
                pmt.from_float(
                    quantum_qubit_param_type.quantum_qubit_param_type.ANGLE),
                pmt.from_float(float(RO_STATE.angle)))
            SIM_msg = pmt.dict_add(
                SIM_msg,
                pmt.from_float(
                    quantum_qubit_param_type.quantum_qubit_param_type.STATE),
                pmt.from_float(quantum_qubit_RO_state_type.
                               quantum_qubit_RO_state_type.START))
            self.message_port_pub(pmt.intern('simulated_data'), SIM_msg)
            RO_STATE.state = quantum_qubit_RO_state_type.quantum_qubit_RO_state_type.START
            self._qubit_stat_map[qubit_id] = RO_STATE
    finally:
        self.unlock()
def test_interned_string_constants(self):
    """Each timing_utils PMTCONSTSTR__ accessor returns its interned symbol."""
    names = ['time', 'trig', 'set', 'disarm', 'rx_time', 'rx_rate',
             'rx_freq', 'rx_sample', 'freq', 'wall_clock_time', 'pdu_out',
             'pdu_in', 'trigger_time', 'trigger_sample', 'trigger_now',
             'late_delta', 'command', 'set_freq', 'in', 'dsp_freq',
             'START', 'END']
    for name in names:
        accessor = getattr(timing_utils, 'PMTCONSTSTR__' + name)
        assert (pmt.eq(accessor(), pmt.intern(name)))
def test_interned_string_constants(self):
    """Each pdu_utils PMTCONSTSTR__ accessor returns its interned symbol."""
    # 'pdu_in' is checked twice, matching the original assertion list.
    names = ['pdu_in', 'msg', 'in', 'out', 'pdu_in', 'pdu_out', 'cpdus',
             'fpdus', 'burst_time', 'start_time', 'start_time_offset',
             'rx_time', 'tx_time', 'uhd_time_tuple', 'dict', 'data', 'emit',
             'head', 'tail', 'ctrl', 'conf', 'val', 'key', 'set_val',
             'set_key', 'bursts', 'detects', 'debug', 'zeroX', 'window',
             'burst_id', 'burst_index', 'eob_offset', 'eob_alignment',
             'sample_rate', 'symbol_rate', 'tx_sob', 'tx_eob', 'pdu_num',
             'time_type', 'wall_clock_time', 'duration', 'set_trigger_tag',
             'set_arming_tag', 'set_tx_limit', 'set_delays', 'set_message',
             'set_holdoff', 'set_armed', 'trigger_now', 'system']
    for name in names:
        accessor = getattr(pdu_utils, 'PMTCONSTSTR__' + name)
        assert (pmt.eq(accessor(), pmt.intern(name)))
def test_001(self):
    """Round-trip complex samples through file_meta_sink/file_meta_source
    and verify the header, extras dictionary, tags, and payload survive.

    BUGFIX: the original error paths called ``self.assertFalse()`` with no
    argument, which raises TypeError instead of failing the test; replaced
    with explicit ``self.fail(...)``.  The output file is now read inside a
    ``with`` block so the handle cannot leak on an assertion failure.
    """
    N = 1000
    outfile = "test_out.dat"
    detached = False
    samp_rate = 200000

    # Build an "extras" dictionary carrying the sample rate and serialize
    # it so file_meta_sink embeds it in the metadata header.
    key = pmt.intern("samp_rate")
    val = pmt.from_double(samp_rate)
    extras = pmt.make_dict()
    extras = pmt.dict_add(extras, key, val)
    extras_str = pmt.serialize_str(extras)

    data = sig_source_c(samp_rate, 1000, 1, N)
    src = blocks.vector_source_c(data)
    fsnk = blocks.file_meta_sink(gr.sizeof_gr_complex, outfile,
                                 samp_rate, 1,
                                 blocks.GR_FILE_FLOAT, True,
                                 1000000, extras_str, detached)
    fsnk.set_unbuffered(True)

    self.tb.connect(src, fsnk)
    self.tb.run()
    fsnk.close()

    # Parse the header and the extras dictionary back out of the file.
    with open(outfile, "rb") as handle:
        header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
        if len(header_str) == 0:
            self.fail("file metadata header is empty")
        try:
            header = pmt.deserialize_str(header_str)
        except RuntimeError:
            self.fail("could not deserialize file metadata header")
        info = parse_file_metadata.parse_header(header, False)
        extra_str = handle.read(info["extra_len"])
        self.assertEqual(len(extra_str) > 0, True)

    try:
        extra = pmt.deserialize_str(extra_str)
    except RuntimeError:
        self.fail("could not deserialize extras dictionary")
    extra_info = parse_file_metadata.parse_extra_dict(extra, info, False)

    self.assertEqual(info['rx_rate'], samp_rate)
    self.assertEqual(pmt.to_double(extra_info['samp_rate']), samp_rate)

    # Test file metadata source
    src.rewind()
    fsrc = blocks.file_meta_source(outfile, False)
    vsnk = blocks.vector_sink_c()
    tsnk = blocks.tag_debug(gr.sizeof_gr_complex, "QA")
    ssnk = blocks.vector_sink_c()
    self.tb.disconnect(src, fsnk)
    self.tb.connect(fsrc, vsnk)
    self.tb.connect(fsrc, tsnk)
    self.tb.connect(src, ssnk)
    self.tb.run()
    fsrc.close()

    # Test to make sure tags with 'samp_rate' and 'rx_rate' keys
    # were generated and received correctly.
    for t in tsnk.current_tags():
        if pmt.eq(t.key, pmt.intern("samp_rate")):
            self.assertEqual(pmt.to_double(t.value), samp_rate)
        elif pmt.eq(t.key, pmt.intern("rx_rate")):
            self.assertEqual(pmt.to_double(t.value), samp_rate)

    # Test that the data portion was extracted and received correctly.
    self.assertComplexTuplesAlmostEqual(vsnk.data(), ssnk.data(), 5)

    os.remove(outfile)
def test_009_dual_tags_nostore(self):
    """Two tagged sources feed one sandia_tag_debug (store-all disabled);
    all five tags must be observed, in order, with their original values."""
    origin = pmt.intern("test_003_tags")

    def make_tag(offset, name, value):
        # Convenience wrapper: build a stream tag from plain Python values.
        return gr.tag_utils.python_to_tag(
            [offset, pmt.intern(name), pmt.from_double(value), origin])

    tags_one = [make_tag(0, "sam", 10000),
                make_tag(1, "peter", 1000),
                make_tag(2, "jacob", 100)]
    tags_two = [make_tag(2, "chip", 10),
                make_tag(2, "josh", 1)]

    samples = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
    src1 = blocks.vector_source_i(samples, False, 1, tags_one)
    src2 = blocks.vector_source_i(samples, False, 1, tags_two)
    dut = sandia_utils.sandia_tag_debug(gr.sizeof_int, "tag QA", "", False)

    self.tb.connect(src1, (dut, 0))
    self.tb.connect(src2, (dut, 1))
    self.tb.run()

    expected = [("sam", 10000), ("peter", 1000), ("jacob", 100),
                ("chip", 10), ("josh", 1)]
    self.assertEqual(len(expected), dut.num_tags())
    for idx, (name, value) in enumerate(expected):
        tag = dut.get_tag(idx)
        self.assertTrue(pmt.eq(tag.key, pmt.intern(name)))
        self.assertAlmostEqual(value, pmt.to_double(tag.value))

    self.tb.stop()
    self.tb.wait()
def test_005_multiWork(self):
    """Run the top block twice against sandia_tag_debug to confirm tags
    accumulate across multiple calls to its work function."""
    origin = pmt.intern("test_003_tags")

    def make_tag(offset, name, value):
        # Convenience wrapper: build a stream tag from plain Python values.
        return gr.tag_utils.python_to_tag(
            [offset, pmt.intern(name), pmt.from_double(value), origin])

    first_tags = [make_tag(0, "sam", 10000),
                  make_tag(1, "peter", 1000),
                  make_tag(2, "jacob", 100)]
    second_tags = [make_tag(2, "chip", 10),
                   make_tag(2, "josh", 1)]

    samples = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
    src = blocks.vector_source_i(samples, False, 1, first_tags)
    dut = sandia_utils.sandia_tag_debug(gr.sizeof_int, "tag QA")
    self.tb.connect(src, dut)

    def check_tags(expected):
        # The accumulated tag list must match the (name, value) pairs in order.
        self.assertEqual(len(expected), dut.num_tags())
        for idx, (name, value) in enumerate(expected):
            tag = dut.get_tag(idx)
            self.assertTrue(pmt.eq(tag.key, pmt.intern(name)))
            self.assertAlmostEqual(value, pmt.to_double(tag.value))

    # First run: only the three tags attached to the initial data.
    self.tb.run()
    check_tags([("sam", 10000), ("peter", 1000), ("jacob", 100)])
    self.tb.stop()
    self.tb.wait()

    # Second run: two more tags arrive; the earlier ones must be retained.
    src.set_data(samples, second_tags)
    self.tb.run()
    check_tags([("sam", 10000), ("peter", 1000), ("jacob", 100),
                ("chip", 10), ("josh", 1)])
    self.tb.stop()
    self.tb.wait()
def test_interned_string_constants(self):
    """Verify every fhss_utils.PMTCONSTSTR__* accessor returns the expected
    interned PMT symbol.

    Each accessor ``PMTCONSTSTR__<name>()`` must compare ``pmt.eq`` to
    ``pmt.intern("<name>")``.  Uses ``assertTrue`` instead of bare
    ``assert`` so the checks survive ``python -O`` and report which
    constant mismatched.
    """
    names = [
        "in", "out", "center_frequency", "relative_frequency",
        "sample_rate", "bandwidth", "pwr_db", "snr_db", "debug",
        "rx_freq", "burst_id", "magnitude", "noise_density",
        "new_burst", "gone_burst", "rx_time", "start_time",
        "duration", "cpdus", "start_offset", "end_offset",
    ]
    for name in names:
        accessor = getattr(fhss_utils, "PMTCONSTSTR__" + name)
        self.assertTrue(
            pmt.eq(accessor(), pmt.intern(name)),
            "interned constant mismatch for %r" % name)