def test_001(self):
        N = 1000
        outfile = "test_out.dat"

        detached = False
        samp_rate = 200000
        key = pmt.pmt_intern("samp_rate")
        val = pmt.pmt_from_double(samp_rate)
        extras = pmt.pmt_make_dict()
        extras = pmt.pmt_dict_add(extras, key, val)
        extras_str = pmt.pmt_serialize_str(extras)

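        # Generate N complex samples and write them through a file_meta_sink
        # that embeds the extras dictionary in the file's metadata header.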
        data = sig_source_c(samp_rate, 1000, 1, N)
        src  = gr.vector_source_c(data)
        fsnk = blocks.file_meta_sink(gr.sizeof_gr_complex, outfile,
                                     samp_rate, 1, 
                                     blocks.GR_FILE_FLOAT, True,
                                     1000000, extras_str, detached)
        fsnk.set_unbuffered(True)

        self.tb.connect(src, fsnk)
        self.tb.run()
        fsnk.close()

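        # Read the file back and deserialize the fixed-length metadata header.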
        handle = open(outfile, "rb")
        header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
        if len(header_str) == 0:
            self.fail("could not read header from file")

        try:
            header = pmt.pmt_deserialize_str(header_str)
        except RuntimeError:
            self.fail("could not deserialize header from file")

        print(header)
        info = parse_file_metadata.parse_header(header, False)

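        # Read and deserialize the extras dictionary that follows the header.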
        extra_str = handle.read(info["extra_len"])
        self.assertGreater(len(extra_str), 0)
        handle.close()

        try:
            extra = pmt.pmt_deserialize_str(extra_str)
        except RuntimeError:
            self.fail("could not deserialize extras from file")

        extra_info = parse_file_metadata.parse_extra_dict(extra, info, False)

        self.assertEqual(info['rx_rate'], samp_rate)
        self.assertEqual(pmt.pmt_to_double(extra_info['samp_rate']), samp_rate)


        # Test file metadata source
        src.rewind()
        fsrc = blocks.file_meta_source(outfile, False)
        vsnk = gr.vector_sink_c()
        tsnk = gr.tag_debug(gr.sizeof_gr_complex, "QA")
        ssnk = gr.vector_sink_c()
        self.tb.disconnect(src, fsnk)
        self.tb.connect(fsrc, vsnk)
        self.tb.connect(fsrc, tsnk)
        self.tb.connect(src, ssnk)
        self.tb.run()

        # Test to make sure tags with 'samp_rate' and 'rx_rate' keys
        # were generated and received correctly.
        tags = tsnk.current_tags()
        for t in tags:
            if(pmt.pmt_eq(t.key, pmt.pmt_intern("samp_rate"))):
                self.assertEqual(pmt.pmt_to_double(t.value), samp_rate)
            elif(pmt.pmt_eq(t.key, pmt.pmt_intern("rx_rate"))):
                self.assertEqual(pmt.pmt_to_double(t.value), samp_rate)

        # Test that the data portion was extracted and received correctly.
        self.assertComplexTuplesAlmostEqual(vsnk.data(), ssnk.data(), 5)

        os.remove(outfile)
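The example above uses the SWIG-era pmt.pmt_* bindings from GNU Radio 3.6. As a point of reference, a minimal sketch of the same extras-dictionary construction against the flat pmt namespace shipped with GNU Radio 3.7 and later (the pmt.intern / pmt.dict_add style names) might look like this:

import pmt

# Same extras dict as in the test, built with the post-3.7 flat pmt API.
samp_rate = 200000
extras = pmt.make_dict()
extras = pmt.dict_add(extras, pmt.intern("samp_rate"),
                      pmt.from_double(samp_rate))
extras_str = pmt.serialize_str(extras)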
Example #2
def pmt_from_dict(p):
    d = pmt.pmt_make_dict()
    for k, v in p.items():
        # PMT dicts are immutable, so pmt_dict_add returns a new dict.
        d = pmt.pmt_dict_add(d, python_to_pmt(k), python_to_pmt(v))
    return d
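The python_to_pmt helper referenced here is not part of the snippet. A minimal sketch of such a converter, covering only a handful of basic types with the same pmt.pmt_* bindings used above, could look like this:

import pmt

def python_to_pmt(x):
    # Hypothetical converter: handles bools, ints, floats and strings only;
    # a real implementation would also need vectors, tuples, None, etc.
    if isinstance(x, bool):
        return pmt.PMT_T if x else pmt.PMT_F
    if isinstance(x, int):
        return pmt.pmt_from_long(x)
    if isinstance(x, float):
        return pmt.pmt_from_double(x)
    if isinstance(x, str):
        return pmt.pmt_intern(x)
    raise TypeError("unsupported type: %r" % (type(x),))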