Example #1
    def test_multiple_work_calls_tag_offsets(self):
        '''Test that if the work is called multiple times,
        tags still end up in the right places'''

        # generate a file
        num_samps = 4000000
        data, meta_json, filename, meta_file = self.make_file("multi_work",
                                                              N=num_samps)

        # Add a capture in the middle
        meta_json["captures"].append({
            "core:sample_start": num_samps / 2,
            "test:a": 1
        })
        # and on the last sample
        meta_json["captures"].append({
            "core:sample_start": num_samps - 1,
            "test:b": 2
        })
        with open(meta_file, "w") as f:
            json.dump(meta_json, f)

        file_source = sigmf.source(filename, "cf32_le")
        sink = blocks.vector_sink_c()
        collector = tag_collector()
        tb = gr.top_block()
        tb.connect(file_source, collector)
        tb.connect(collector, sink)
        tb.start()
        tb.wait()
        print(collector.tags)
        collector.assertTagExistsMsg(num_samps // 2, "test:a", 1,
                                     "missing tag!", self)
        collector.assertTagExistsMsg(num_samps - 1, "test:b", 2,
                                     "missing tag!", self)
Example #2
    def test_annotations_to_tags(self):
        data, meta_json, filename, meta_file = self.make_file(
            "annotation_tags")

        # Add some annotations
        with open(meta_file, "r+") as f:
            data = json.load(f)
            data['annotations'].append({
                "core:sample_start": 5,
                "test:string": "This is some string data",
                "test:more_data": True,
            })
            data['annotations'].append({
                "core:sample_start": 10,
                "test:rating": 12,
            })

            # Write over f with a version with the new annotations
            f.seek(0)
            json.dump(data, f, indent=4)
            f.truncate()

        # run through the flowgraph
        file_source = sigmf.source(filename, "cf32_le")
        sink = blocks.vector_sink_c()
        collector = tag_collector()
        tb = gr.top_block()
        tb.connect(file_source, collector)
        tb.connect(collector, sink)
        tb.run()

        collector.assertTagExists(5, "test:string", "This is some string data")
        collector.assertTagExists(5, "test:more_data", True)
        collector.assertTagExists(10, "test:rating", 12)
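
The tag_collector block used throughout these examples is a test helper rather than part of the sigmf module. A plausible sketch of it, assuming a complex-valued pass-through sync block that records every stream tag it sees (the real helper's assertTagExists/assertTagExistsMsg presumably wrap unittest assertions instead of the bare assert used here):

import numpy as np
import pmt
from gnuradio import gr

class tag_collector(gr.sync_block):
    # Pass-through block that copies input to output and records every
    # stream tag it observes so the test can inspect them afterwards.
    def __init__(self):
        gr.sync_block.__init__(self, name="tag_collector",
                               in_sig=[np.complex64],
                               out_sig=[np.complex64])
        self.tags = []

    def work(self, input_items, output_items):
        nitems = len(input_items[0])
        for tag in self.get_tags_in_window(0, 0, nitems):
            self.tags.append({"offset": tag.offset,
                              "key": pmt.to_python(tag.key),
                              "value": pmt.to_python(tag.value)})
        output_items[0][:] = input_items[0]
        return nitems

    def assertTagExists(self, offset, key, value):
        # Simplified check; the real helper reports failures through unittest.
        assert any(t["offset"] == offset and t["key"] == key and
                   t["value"] == value for t in self.tags)
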
Example #3
    def test_json_types(self):

        # generate a file
        data, meta_json, filename, meta_file = self.make_file("json_types")

        # Add annotations with all types
        with open(meta_file, "r+") as f:
            data = json.load(f)
            data['annotations'].append({
                "core:sample_start": 1,
                "core:sample_count": 2,
                "test:int": -2,
                "test:int64": 278202993021,
                "test:uint": 2,
                "test:uint2": 2**32 + 2,
                "test:double": 2.2,
                "test:neg_double": -2.2,
                "test:bool1": True,
                "test:bool2": False,
                "test:null": None,
                "test:string": "foo",
            })
            f.seek(0)
            json.dump(data, f, indent=4)
            f.truncate()

        # run through the flowgraph
        file_source = sigmf.source(filename, "cf32_le")
        sink = blocks.vector_sink_c()
        collector = tag_collector()
        tb = gr.top_block()
        tb.connect(file_source, collector)
        tb.connect(collector, sink)
        tb.start()
        tb.wait()

        # Check that all the types got read correctly
        collector.assertTagExists(1, "test:int", -2)
        collector.assertTagExists(1, "test:int64", 278202993021)
        collector.assertTagExists(1, "test:uint", 2)
        collector.assertTagExists(1, "test:uint2", 2**32 + 2)
        collector.assertTagExists(1, "test:double", 2.2)
        collector.assertTagExists(1, "test:neg_double", -2.2)
        collector.assertTagExists(1, "test:bool1", True)
        collector.assertTagExists(1, "test:bool2", False)
        collector.assertTagExists(1, "test:null", None)
        collector.assertTagExists(1, "test:string", "foo")
Example #4
    def test_begin_tags(self):
        data, meta_json, filename, meta_file = self.make_file("begin")

        # run through the flowgraph
        file_source = sigmf.source(filename, "cf32_le")
        begin_tag = pmt.to_pmt("TEST")
        file_source.set_begin_tag(begin_tag)
        sink = blocks.vector_sink_c()
        collector = tag_collector()
        tb = gr.top_block()
        tb.connect(file_source, collector)
        tb.connect(collector, sink)
        tb.run()

        zero_offset_tags = [t for t in collector.tags if t["offset"] == 0]
        test_tag = [t for t in zero_offset_tags if t["key"] == "TEST"]
        self.assertEqual(len(test_tag), 1)
Example #5
    def test_command_message(self):
        data, meta_json, filename, meta_file = self.make_file("begin")

        # run through the flowgraph
        file_source = sigmf.source(filename, "cf32_le", repeat=True)
        msg = {"command": "set_begin_tag", "tag": "test"}
        generator = message_generator(msg)
        sink = blocks.vector_sink_c()
        collector = tag_collector()

        tb = gr.top_block()
        tb.msg_connect((generator, 'messages'), (file_source, 'command'))
        tb.connect(file_source, collector)
        tb.connect(collector, sink)
        tb.start()
        sleep(1)
        tb.stop()
        tb.wait()

        for tag in collector.tags:
            if tag["key"] != "rx_time":
                self.assertEqual(tag["key"], "test")
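
message_generator is another helper from the test suite: a block with only a message output port that repeatedly publishes the given dict as a PMT message, so the running sigmf.source can receive the command while the flowgraph is active. A rough sketch under that assumption (the 'messages' port name comes from the msg_connect call above):

import threading
import time

import pmt
from gnuradio import gr

class message_generator(gr.basic_block):
    # Publishes a Python dict as a PMT message on the 'messages' port at a
    # fixed interval from a background thread until the flowgraph stops.
    def __init__(self, msg, interval=0.1):
        gr.basic_block.__init__(self, name="message_generator",
                                in_sig=None, out_sig=None)
        self.msg = pmt.to_pmt(msg)
        self.interval = interval
        self.message_port_register_out(pmt.intern("messages"))
        self._done = threading.Event()
        self._thread = threading.Thread(target=self._loop, daemon=True)

    def start(self):
        self._thread.start()
        return True

    def stop(self):
        self._done.set()
        self._thread.join()
        return True

    def _loop(self):
        while not self._done.is_set():
            self.message_port_pub(pmt.intern("messages"), self.msg)
            time.sleep(self.interval)
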
Example #6
    def test_capture_segments_to_tags(self):
        data, meta_json, filename, meta_file = self.make_file("capture_segs")

        # Add some capture segments
        with open(meta_file, "r+") as f:
            data = json.load(f)
            data['captures'].append({
                "core:sample_start": 5,
                "core:frequency": 2.4e9,
            })
            data['captures'].append({
                "core:sample_start": 10,
                "core:frequency": 2.44e9,
            })
            f.seek(0)
            json.dump(data, f, indent=4)
            f.truncate()

        # run through the flowgraph
        file_source = sigmf.source(filename, "cf32_le")
        begin_tag = pmt.to_pmt("TEST")
        file_source.set_begin_tag(begin_tag)
        sink = blocks.vector_sink_c()
        collector = tag_collector()
        tb = gr.top_block()
        tb.connect(file_source, collector)
        tb.connect(collector, sink)
        tb.run()

        # There should be 3 tags
        print(collector.tags)
        zero_offset_tags = [t for t in collector.tags if t["offset"] == 0]
        test_tag = [t for t in zero_offset_tags if t["key"] == "TEST"]
        self.assertEqual(len(test_tag), 1)
        collector.assertTagExists(5, "rx_freq", 2.4e9)
        collector.assertTagExists(10, "rx_freq", 2.44e9)

    def test_tag_roundtrip(self):
        # write some data with both capture and annotation data
        freq = 2.4e9
        samp_rate = 100e6
        test_index = 1000
        time = (1222277384, 0.0625)
        test_a = 22.3125
        test_b = "asdf"
        test_c = True
        test_index_2 = 2000
        test_d = 18.125
        test_e = "jkl;"
        test_f = False
        injector = advanced_tag_injector([
            (0, {"rx_time": time}),
            (0, {"rx_freq": freq}),
            (0, {"rx_rate": samp_rate}),
            (test_index, {"test:a": test_a,
                          "test:b": test_b, "test:c": test_c}),
            (test_index_2, {"test_d": test_d,
                            "test_e": test_e, "test_f": test_f})
        ])
        src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
        num_samps = int(1e6)
        head = blocks.head(gr.sizeof_gr_complex, num_samps)
        data_file, json_file = self.temp_file_names()
        file_sink = sigmf.sink("cf32_le", data_file)

        tb = gr.top_block()
        tb.connect(src, head)
        tb.connect(head, injector)
        tb.connect(injector, file_sink)
        tb.start()
        tb.wait()
        # Make sure the data file got written
        self.assertTrue(os.path.exists(data_file), "Data file missing")
        self.assertEqual(os.path.getsize(data_file),
                         gr.sizeof_gr_complex * num_samps,
                         "Data file incomplete")

        # Ensure that the data exists as we think it should
        with open(json_file, "r") as f:
            meta_str = f.read()
            meta = json.loads(meta_str)
            print(meta)
            self.assertEqual(meta["captures"][0]["core:frequency"], freq,
                             "Bad metadata, frequency")
            # Should only be one capture segment
            self.assertEqual(len(meta["captures"]), 1)
            self.assertEqual(meta["global"]["core:sample_rate"], samp_rate,
                             "Bad metadata, samp_rate")

            self.assertEqual(meta["annotations"][0]["test:a"], test_a,
                             "bad test_a value")
            self.assertEqual(meta["annotations"][0]["test:b"], test_b,
                             "bad test_b value")
            self.assertEqual(meta["annotations"][0]["test:c"], test_c,
                             "bad test_c value")
            self.assertEqual(meta["annotations"][0]["core:sample_start"],
                             test_index, "Bad test index")
            self.assertEqual(meta["annotations"][1]["unknown:test_d"], test_d,
                             "bad test_d value")
            self.assertEqual(meta["annotations"][1]["unknown:test_e"], test_e,
                             "bad test_e value")
            self.assertEqual(meta["annotations"][1]["unknown:test_f"], test_f,
                             "bad test_f value")
            self.assertEqual(meta["annotations"][1]["core:sample_start"],
                             test_index_2, "Bad test index")

        # Read out the data and check that it matches
        file_source = sigmf.source(data_file, "cf32_le")
        collector = tag_collector()
        sink = blocks.vector_sink_c()
        tb = gr.top_block()
        tb.connect(file_source, collector)
        tb.connect(collector, sink)
        tb.start()
        tb.wait()
        collector.assertTagExists(0, "rx_rate", samp_rate)
        collector.assertTagExists(0, "rx_time", time)
        collector.assertTagExists(0, "rx_freq", freq)
        collector.assertTagExists(test_index, "test:a", test_a)
        collector.assertTagExists(test_index, "test:b", test_b)
        collector.assertTagExists(test_index, "test:c", test_c)
        collector.assertTagExists(test_index_2, "test_d", test_d)
        collector.assertTagExists(test_index_2, "test_e", test_e)
        collector.assertTagExists(test_index_2, "test_f", test_f)