Example #1
0
    def test_001(self):
        N = 1000
	outfile = "test_out.dat"

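        # Build an extras PMT dictionary keyed by 'samp_rate' and serialize it
        # so that file_meta_sink can embed it in the metadata header.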
        detached = False
        samp_rate = 200000
        key = pmt.pmt_intern("samp_rate")
        val = pmt.pmt_from_double(samp_rate)
        extras = pmt.pmt_make_dict()
        extras = pmt.pmt_dict_add(extras, key, val)
        extras_str = pmt.pmt_serialize_str(extras)

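        # Stream N samples of the test signal through file_meta_sink, which
        # writes an inline (non-detached) metadata header plus the serialized
        # extras ahead of the complex float data.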
        data = sig_source_c(samp_rate, 1000, 1, N)
        src  = gr.vector_source_c(data)
        fsnk = blocks.file_meta_sink(gr.sizeof_gr_complex, outfile,
                                     samp_rate, 1, 
                                     blocks.GR_FILE_FLOAT, True,
                                     1000000, extras_str, detached)
        fsnk.set_unbuffered(True)

        self.tb.connect(src, fsnk)
        self.tb.run()
        fsnk.close()

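        # Read the file back: the first HEADER_LENGTH bytes hold the serialized
        # header PMT, and the extras dictionary follows immediately after it.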
        handle = open(outfile, "rb")
        header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
        if(len(header_str) == 0):
            self.fail("could not read header from file")

        try:
            header = pmt.pmt_deserialize_str(header_str)
        except RuntimeError:
            self.fail("could not deserialize header")

        print(header)
        info = parse_file_metadata.parse_header(header, False)

        extra_str = handle.read(info["extra_len"])
        self.assertGreater(len(extra_str), 0)
        handle.close()

        try:
            extra = pmt.pmt_deserialize_str(extra_str)
        except RuntimeError:
            self.fail("could not deserialize extra header data")

        extra_info = parse_file_metadata.parse_extra_dict(extra, info, False)

        self.assertEqual(info['rx_rate'], samp_rate)
        self.assertEqual(pmt.pmt_to_double(extra_info['samp_rate']), samp_rate)


        # Test file metadata source
        src.rewind()
        fsrc = blocks.file_meta_source(outfile, False)
        vsnk = gr.vector_sink_c()
        tsnk = gr.tag_debug(gr.sizeof_gr_complex, "QA")
        ssnk = gr.vector_sink_c()
        self.tb.disconnect(src, fsnk)
        self.tb.connect(fsrc, vsnk)
        self.tb.connect(fsrc, tsnk)
        self.tb.connect(src, ssnk)
        self.tb.run()

        # Test to make sure tags with 'samp_rate' and 'rx_rate' keys
        # were generated and received correctly.
        tags = tsnk.current_tags()
        for t in tags:
            if(pmt.pmt_eq(t.key, pmt.pmt_intern("samp_rate"))):
                self.assertEqual(pmt.pmt_to_double(t.value), samp_rate)
            elif(pmt.pmt_eq(t.key, pmt.pmt_intern("rx_rate"))):
                self.assertEqual(pmt.pmt_to_double(t.value), samp_rate)

        # Test that the data portion was extracted and received correctly.
        self.assertComplexTuplesAlmostEqual(vsnk.data(), ssnk.data(), 5)

        os.remove(outfile)
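# The extras round trip above uses the pmt_-prefixed bindings from the
# GNU Radio 3.6 era.  For reference, a minimal sketch of the same
# build/serialize/restore cycle with the prefix-free PMT API; this assumes
# GNU Radio 3.7 or later and is not part of the test above.
import pmt

samp_rate = 200000
extras = pmt.make_dict()
extras = pmt.dict_add(extras, pmt.intern("samp_rate"), pmt.from_double(samp_rate))
extras_str = pmt.serialize_str(extras)

# Round trip: deserialize the string and read the value back out.
restored = pmt.deserialize_str(extras_str)
value = pmt.dict_ref(restored, pmt.intern("samp_rate"), pmt.PMT_NIL)
assert pmt.to_double(value) == samp_rate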
def parse_header(p, VERBOSE=False):
    dump = pmt.PMT_NIL

    info = dict()

    if(pmt.pmt_is_dict(p) is False):
        sys.stderr.write("Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)

    # GET FILE FORMAT VERSION NUMBER
    if(pmt.pmt_dict_has_key(p, pmt.pmt_string_to_symbol("version"))):
        r = pmt.pmt_dict_ref(p, pmt.pmt_string_to_symbol("version"), dump)
        version = pmt.pmt_to_long(r)
        if(VERBOSE):
            print "Version Number: {0}".format(version)
    else:
        sys.stderr.write("Could not find key 'version': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SAMPLE RATE
    if(pmt.pmt_dict_has_key(p, pmt.pmt_string_to_symbol("rx_rate"))):
        r = pmt.pmt_dict_ref(p, pmt.pmt_string_to_symbol("rx_rate"), dump)
        samp_rate = pmt.pmt_to_double(r)
        info["rx_rate"] = samp_rate
        if(VERBOSE):
            print "Sample Rate: {0:.2f} sps".format(samp_rate)
    else:
        sys.stderr.write("Could not find key 'sr': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT TIME STAMP
    if(pmt.pmt_dict_has_key(p, pmt.pmt_string_to_symbol("rx_time"))):
        r = pmt.pmt_dict_ref(p, pmt.pmt_string_to_symbol("rx_time"), dump)
        pmt_secs = pmt.pmt_tuple_ref(r, 0)
        pmt_fracs = pmt.pmt_tuple_ref(r, 1)
        secs = float(pmt.pmt_to_uint64(pmt_secs))
        fracs = pmt.pmt_to_double(pmt_fracs)
        t = secs + fracs
        info["rx_time"] = t
        if(VERBOSE):
            print "Seconds: {0:.6f}".format(t)
    else:
        sys.stderr.write("Could not find key 'time': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT ITEM SIZE
    if(pmt.pmt_dict_has_key(p, pmt.pmt_string_to_symbol("size"))):
        r = pmt.pmt_dict_ref(p, pmt.pmt_string_to_symbol("size"), dump)
        dsize = pmt.pmt_to_long(r)
        info["size"] = dsize
        if(VERBOSE):
            print "Item size: {0}".format(dsize)
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT DATA TYPE
    if(pmt.pmt_dict_has_key(p, pmt.pmt_string_to_symbol("type"))):
        r = pmt.pmt_dict_ref(p, pmt.pmt_string_to_symbol("type"), dump)
        dtype = pmt.pmt_to_long(r)
        stype = ftype_to_string[dtype]
        info["type"] = stype
        if(VERBOSE):
            print "Data Type: {0} ({1})".format(stype, dtype)
    else:
        sys.stderr.write("Could not find key 'type': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT COMPLEX
    if(pmt.pmt_dict_has_key(p, pmt.pmt_string_to_symbol("cplx"))):
        r = pmt.pmt_dict_ref(p, pmt.pmt_string_to_symbol("cplx"), dump)
        cplx = pmt.pmt_to_bool(r)
        info["cplx"] = cplx
        if(VERBOSE):
            print "Complex? {0}".format(cplx)
    else:
        sys.stderr.write("Could not find key 'cplx': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT WHERE CURRENT SEGMENT STARTS
    if(pmt.pmt_dict_has_key(p, pmt.pmt_string_to_symbol("strt"))):
        r = pmt.pmt_dict_ref(p, pmt.pmt_string_to_symbol("strt"), dump)
        seg_start = pmt.pmt_to_uint64(r)
        info["hdr_len"] = seg_start
        info["extra_len"] = seg_start - HEADER_LENGTH
        info["has_extra"] = info["extra_len"] > 0
        if(VERBOSE):
            print "Header Length: {0} bytes".format(info["hdr_len"])
            print "Extra Length:  {0}".format((info["extra_len"]))
            print "Extra Header?  {0}".format(info["has_extra"])
    else:
        sys.stderr.write("Could not find key 'strt': invalid or corrupt data file.\n")
        sys.exit(1)

    # EXTRACT SIZE OF DATA
    if(pmt.pmt_dict_has_key(p, pmt.pmt_string_to_symbol("bytes"))):
        r = pmt.pmt_dict_ref(p, pmt.pmt_string_to_symbol("bytes"), dump)
        nbytes = pmt.pmt_to_uint64(r)

        nitems = nbytes // dsize
        info["nitems"] = nitems
        info["nbytes"] = nbytes

        if(VERBOSE):
            print "Size of Data: {0} bytes".format(nbytes)
            print "              {0} items".format(nitems)
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)

    return info
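# For reference, a minimal sketch of driving parse_header() against a file
# written by file_meta_sink, as in the test above.  It assumes HEADER_LENGTH,
# the pmt_-prefixed bindings, and parse_header() are all in scope in this
# module; the numpy dtype is an illustration for the complex-float data the
# test writes (type float, cplx True, item size 8).
import numpy as np

handle = open("test_out.dat", "rb")
header = pmt.pmt_deserialize_str(handle.read(HEADER_LENGTH))
info = parse_header(header, VERBOSE=True)

# Skip (or parse) the extra header, then read the data segment it describes.
if info["has_extra"]:
    handle.read(info["extra_len"])
raw = handle.read(info["nbytes"])
handle.close()

samples = np.frombuffer(raw, dtype=np.complex64)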
Example #3
0
    def test_001(self):
        N = 1000
        outfile = "test_out.dat"

        detached = False
        samp_rate = 200000
        key = pmt.pmt_intern("samp_rate")
        val = pmt.pmt_from_double(samp_rate)
        extras = pmt.pmt_make_dict()
        extras = pmt.pmt_dict_add(extras, key, val)
        extras_str = pmt.pmt_serialize_str(extras)

        data = sig_source_c(samp_rate, 1000, 1, N)
        src = gr.vector_source_c(data)
        fsnk = blocks.file_meta_sink(
            gr.sizeof_gr_complex, outfile, samp_rate, 1, blocks.GR_FILE_FLOAT, True, 1000000, extras_str, detached
        )
        fsnk.set_unbuffered(True)

        self.tb.connect(src, fsnk)
        self.tb.run()
        fsnk.close()

        handle = open(outfile, "rb")
        header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
        if len(header_str) == 0:
            self.fail("could not read header from file")

        try:
            header = pmt.pmt_deserialize_str(header_str)
        except RuntimeError:
            self.fail("could not deserialize header")

        print(header)
        info = parse_file_metadata.parse_header(header, False)

        extra_str = handle.read(info["extra_len"])
        self.assertGreater(len(extra_str), 0)
        handle.close()

        try:
            extra = pmt.pmt_deserialize_str(extra_str)
        except RuntimeError:
            self.fail("could not deserialize extra header data")

        extra_info = parse_file_metadata.parse_extra_dict(extra, info, False)

        self.assertEqual(info["rx_rate"], samp_rate)
        self.assertEqual(pmt.pmt_to_double(extra_info["samp_rate"]), samp_rate)

        # Test file metadata source
        src.rewind()
        fsrc = blocks.file_meta_source(outfile, False)
        vsnk = gr.vector_sink_c()
        tsnk = gr.tag_debug(gr.sizeof_gr_complex, "QA")
        ssnk = gr.vector_sink_c()
        self.tb.disconnect(src, fsnk)
        self.tb.connect(fsrc, vsnk)
        self.tb.connect(fsrc, tsnk)
        self.tb.connect(src, ssnk)
        self.tb.run()

        # Test to make sure tags with 'samp_rate' and 'rx_rate' keys
        # were generated and received correctly.
        tags = tsnk.current_tags()
        for t in tags:
            if pmt.pmt_eq(t.key, pmt.pmt_intern("samp_rate")):
                self.assertEqual(pmt.pmt_to_double(t.value), samp_rate)
            elif pmt.pmt_eq(t.key, pmt.pmt_intern("rx_rate")):
                self.assertEqual(pmt.pmt_to_double(t.value), samp_rate)

        # Test that the data portion was extracted and received correctly.
        self.assertComplexTuplesAlmostEqual(vsnk.data(), ssnk.data(), 5)

        os.remove(outfile)