def test_bad_filename(self):
    """A sink constructed with an unopenable path must raise RuntimeError."""
    # Compose a path under /tmp whose parent directory is random and
    # therefore does not exist
    missing_path = os.path.join("/tmp", uuid.uuid4().hex, uuid.uuid4().hex)
    # Constructing the sink against that path should fail immediately
    with self.assertRaises(RuntimeError):
        sigmf.sink("cf32_le", missing_path)
def test_set_capture_meta_via_message(self):
    '''A set_capture_meta command message should update capture metadata.'''
    const_src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    data_file, json_file = self.temp_file_names()
    sink_block = sigmf.sink("cf32_le", data_file)
    msg_src = msg_sender()
    top = gr.top_block()
    top.connect(const_src, sink_block)
    top.msg_connect(msg_src, "out", sink_block, "command")
    top.start()
    sleep(.1)
    # Ask the sink to stamp key test:a onto capture segment 0
    msg_src.send_msg({
        "command": "set_capture_meta",
        "index": 0,
        "key": "test:a",
        "val": 84
    })
    sleep(.2)
    top.stop()
    top.wait()
    with open(json_file, "r") as handle:
        meta = json.load(handle)
    print(meta)
    assert meta["captures"][0]["test:a"] == 84
def test_rx_time_conversion(self):
    '''rx_time tags should be converted to ISO 8601 datetime strings.'''
    const_src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    data_file, json_file = self.temp_file_names()
    sink_block = sigmf.sink("cf32_le", data_file)
    whole_secs = 1520551983
    frac_a = 0.09375
    frac_b = 0.25

    def expected_iso(frac):
        # Whole seconds via strftime, fractional part appended with its
        # leading zero stripped, then a trailing "Z"
        base = datetime.utcfromtimestamp(whole_secs).strftime(
            '%Y-%m-%dT%H:%M:%S')
        return base + str(frac).lstrip('0') + "Z"

    expect_a = expected_iso(frac_a)
    expect_b = expected_iso(frac_b)
    tagger = simple_tag_injector()
    # The very first sample carries an rx_time tag
    tagger.inject_tag = {"rx_time": (whole_secs, frac_a)}
    top = gr.top_block()
    top.connect(const_src, tagger)
    top.connect(tagger, sink_block)
    top.start()
    sleep(.2)
    # Also cover a tag that arrives mid-stream while writing
    tagger.inject_tag = {"rx_time": (whole_secs, frac_b)}
    sleep(.1)
    top.stop()
    top.wait()
    with open(json_file, "r") as handle:
        meta = json.load(handle)
    assert meta["captures"][0]["core:datetime"] == expect_a
    assert meta["captures"][1]["core:datetime"] == expect_b
def test_gps_annotation(self):
    """GPS messages sent to the sink's "gps" port should each produce an
    annotation carrying the coordinates, in arrival order."""
    src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", data_file)
    sender = msg_sender()
    tb = gr.top_block()
    tb.connect(src, file_sink)
    tb.msg_connect(sender, "out", file_sink, "gps")
    coords = [
        (12.345, -67.89),
        (55.555, -110.111),
        (33.123, 33.123),
    ]
    tb.start()
    for lat, lon in coords:
        sender.send_msg({
            "latitude": lat,
            "longitude": lon,
        })
        sleep(.05)
    tb.stop()
    tb.wait()
    # Context manager so the metadata file handle is closed promptly
    # (the original `json.load(open(...))` leaked the handle)
    with open(json_file, "r") as f:
        metadata = json.load(f)
    # should be 3 annotations
    self.assertEqual(len(metadata["annotations"]), len(coords))
    # And they should be these and in this order
    for ii, point in enumerate(coords):
        lat, lon = point
        annotation = metadata["annotations"][ii]
        self.assertEqual(annotation["core:latitude"], lat)
        self.assertEqual(annotation["core:longitude"], lon)
        self.assertIn("GPS", annotation["core:generator"])
def test_tags_to_annotation_segments(self):
    '''Test that tags correctly convert to annotation segments'''
    # FIXME: this test is occasionally flaky, as the flowgraph is shutdown
    # before all the messages get to the sink
    src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", data_file)
    injector = simple_tag_injector()
    tb = gr.top_block()
    tb.connect(src, injector)
    tb.connect(injector, file_sink)
    tb.start()
    # Inject a bunch of tags that should make annotation segments
    # (removed an unused `math.modf(time.time())` call from the loop)
    for i in range(100):
        sleep(.001)
        injector.inject_tag = {"test:a": i, "test:b": True, "test:c": 2.33}
    sleep(.5)
    tb.stop()
    tb.wait()
    # Context manager so the metadata file handle is closed promptly
    with open(json_file, "r") as f:
        metadata = json.load(f)
    # There should be 100 annotation segments
    self.assertEqual(len(metadata["annotations"]), 100)
    for i in range(100):
        self.assertEqual(metadata["annotations"][i]["test:a"], i)
        self.assertEqual(metadata["annotations"][i]["test:b"], True)
        self.assertEqual(metadata["annotations"][i]["test:c"], 2.33)
def test_tags_to_capture_segment(self):
    """rx_freq/rx_rate/rx_time tags should each create a capture segment."""
    src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", data_file)
    injector = simple_tag_injector()
    tb = gr.top_block()
    tb.connect(src, injector)
    tb.connect(injector, file_sink)
    tb.start()
    # Inject a bunch of tags that should make capture segments
    for i in range(100):
        sleep(.01)
        frac, int_part = math.modf(time.time())
        injector.inject_tag = {
            "rx_freq": i * 1000,
            "rx_rate": i * 500,
            "rx_time": (int(int_part), frac)
        }
    # Give the final injected tag a moment to reach the sink before
    # shutdown (the original stopped immediately, which could drop it)
    sleep(.1)
    tb.stop()
    tb.wait()
    # Context manager so the metadata file handle is closed promptly
    with open(json_file, "r") as f:
        metadata = json.load(f)
    # There should be 100 capture segments
    self.assertEqual(len(metadata["captures"]), 100)
    # And the data in them should match the tags we created
    for i in range(99):
        self.assertEqual(metadata["captures"][i + 1]["core:frequency"],
                         i * 1000)
def test_exception_from_open_via_message(self):
    """Opening an invalid file via a command message should kill the block."""
    const_src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    sink_block = sigmf.sink("cf32_le", "")
    # A path under /tmp with a random, nonexistent directory component
    bad_path = os.path.join("/tmp", uuid.uuid4().hex, uuid.uuid4().hex)
    msg_src = msg_sender()
    top = gr.top_block()
    top.connect(const_src, sink_block)
    top.msg_connect(msg_src, "out", sink_block, "command")
    top.start()
    sleep(.05)
    # This should result in an exception being thrown inside the block
    msg_src.send_msg({"command": "open", "filename": bad_path})
    # This is a bit of a hack: until gnuradio grows better exception
    # handling we detect a dead block by observing that it stops
    # consuming new items
    sleep(.05)
    before = sink_block.nitems_read(0)
    sleep(.05)
    after = sink_block.nitems_read(0)
    self.assertEqual(after - before, 0,
                     "Block didn't die from invalid open message!")
    top.stop()
    top.wait()
def process_func(death_queue):
    """Flowgraph worker: open a file via message, starve the sink of
    samples, then close the file from the pmt handler.

    Puts "KILL" on *death_queue* once it is safe to terminate this
    process. Relies on ``data_file`` from the enclosing scope.
    """
    const_src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    sink_block = sigmf.sink("cf32_le", "")
    msg_src = msg_sender()
    muncher = sample_eater()
    top = gr.top_block()
    top.connect(const_src, muncher)
    top.msg_connect(msg_src, "out", sink_block, "command")
    top.connect(muncher, sink_block)
    top.start()
    sleep(.05)
    msg_src.send_msg({"command": "open", "filename": data_file})
    # Block until the sink reports that the data path has been set
    while sink_block.get_data_path() == "":
        sleep(.05)
    # record some data
    sleep(.05)
    # Swallow every sample so the work function never gets called again
    muncher.eat_samples = True
    # wait a bit
    sleep(.05)
    # "close" should close and write the file in the pmt handler
    msg_src.send_msg({"command": "close"})
    # Tell the parent it may kill this process now
    death_queue.put("KILL")
    top.wait()
def test_capture_datetime_on_start(self):
    '''A file open at flowgraph start should get a core:datetime in its
    first capture segment, and that datetime should be roughly "now".'''
    N = 1000
    samp_rate = 200000
    samples = sig_source_c(samp_rate, 1000, 1, N)
    vec_src = blocks.vector_source_c(samples)
    data_file, json_file = self.temp_file_names()
    sink_block = sigmf.sink("cf32_le", data_file)
    # Build and run the flowgraph to completion
    top = gr.top_block()
    top.connect(vec_src, sink_block)
    top.run()
    top.wait()
    # The recorded datetime should parse and be within 2 s of now
    with open(json_file, "r") as handle:
        meta = json.loads(handle.read())
    print(meta)
    meta_dt_str = meta["captures"][0]["core:datetime"]
    meta_dt = parse_iso_ts(meta_dt_str)
    print(meta_dt)
    assert (datetime.utcnow() - meta_dt).total_seconds() < 2
def test_rate_tags_to_global(self):
    '''Test to ensure that rate tags go to the global segment and not
    to the capture segment'''
    src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", data_file)
    # Set a value that will get overridden by the tags
    file_sink.set_global_meta("core:sample_rate", 20)
    injector = simple_tag_injector()
    tb = gr.top_block()
    tb.connect(src, injector)
    tb.connect(injector, file_sink)
    tb.start()
    samp_rate = 1000.5
    injector.inject_tag = {"rx_rate": samp_rate}
    sleep(.2)
    tb.stop()
    tb.wait()
    # Context manager so the metadata file handle is closed promptly
    # (the original `json.load(open(...))` leaked the handle)
    with open(json_file, "r") as f:
        meta = json.load(f)
    # Samp rate should have been set by the tag
    assert meta["global"]["core:sample_rate"] == samp_rate
    # And should not be in the captures segment
    assert "core:sample_rate" not in meta["captures"][0]
def make_file(
        self, filename, annotations=None, captures=None, global_data=None,
        N=1000, type="cf32_le"):
    """Write a SigMF recording and optionally merge extra metadata into it.

    Args:
        filename: output name; relative names are placed in self.test_dir.
        annotations: optional annotation dicts to merge into or append to
            the recording's annotation list (matched on sample_start and
            sample_count).
        captures: optional capture dicts to merge into or append to the
            capture list (matched on sample_start).
        global_data: optional dict merged into the "global" section.
        N: number of samples to generate.
        type: SigMF datatype string for the sink.

    Returns:
        (sample_data, meta_json, data_path, meta_path)
    """
    if not filename.startswith("/"):
        filename = os.path.join(self.test_dir, filename)
    samp_rate = 200000
    data = sig_source_c(samp_rate, 1000, 1, N)
    src = blocks.vector_source_c(data)
    file_sink = sigmf.sink(type, filename)
    data_path = file_sink.get_data_path()
    meta_path = file_sink.get_meta_path()
    tb = gr.top_block()
    tb.connect(src, file_sink)
    tb.run()
    with open(meta_path, "r+") as f:
        # Parse into a separate name: the original shadowed `data` (the
        # generated samples) here, so callers received the metadata dict
        # instead of the sample vector, unlike the sibling make_file.
        meta = json.load(f)
        if annotations is not None:
            for anno in annotations:
                updated = False
                for anno_data in meta["annotations"]:
                    # Merge into an existing annotation with the same
                    # start/count; otherwise append a new one below
                    if anno["core:sample_start"] ==\
                            anno_data["core:sample_start"] and\
                            anno.get("core:sample_count", "NO_COUNT") ==\
                            anno_data.get("core:sample_count", "NO_COUNT"):
                        anno_data.update(anno)
                        updated = True
                        break
                if not updated:
                    meta["annotations"].append(anno)
            meta["annotations"].sort(key=lambda a: a["core:sample_start"])
        if captures is not None:
            for capture in captures:
                updated = False
                for capture_data in meta["captures"]:
                    if capture["core:sample_start"] ==\
                            capture_data["core:sample_start"]:
                        capture_data.update(capture)
                        updated = True
                        break
                if not updated:
                    # BUG FIX: the original appended the stale `anno`
                    # variable to "annotations" here; an unmatched
                    # capture belongs in the captures list.
                    meta["captures"].append(capture)
        if global_data:
            meta["global"].update(global_data)
        f.seek(0)
        json.dump(meta, f, indent=4)
        f.truncate()
    with open(meta_path, "r") as f:
        meta_json = json.load(f)
    return data, meta_json, data_path, meta_path
def process_func():
    """Stream a constant source into a sink writing to ``data_file``
    (taken from the enclosing scope) until externally stopped."""
    const_src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    sink_block = sigmf.sink("cf32_le", data_file)
    top = gr.top_block()
    top.connect(const_src, sink_block)
    top.start()
    top.wait()
def test_normal_write(self):
    """Write a known vector through the sink, then verify both the raw
    data file contents and the recorded global/capture metadata."""
    N = 1000
    samp_rate = 200000
    data = sig_source_c(samp_rate, 1000, 1, N)
    src = blocks.vector_source_c(data)
    description = "This is a test of the sigmf sink."
    author = "Just some person"
    file_license = "CC-0"
    hardware = "Vector Source"
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", data_file)
    file_sink.set_global_meta("core:sample_rate", samp_rate)
    file_sink.set_global_meta("core:description", description)
    file_sink.set_global_meta("core:author", author)
    # NOTE: removed a stray duplicate set_global_meta call that
    # overwrote core:sample_rate with the author string
    file_sink.set_global_meta("core:license", file_license)
    file_sink.set_global_meta("core:hw", hardware)
    self.assertEqual(data_file, file_sink.get_data_path())
    # And get_meta_path
    self.assertEqual(json_file, file_sink.get_meta_path())
    # build flowgraph here
    tb = gr.top_block()
    tb.connect(src, file_sink)
    tb.run()
    tb.wait()
    # check that data file equals data
    read_data = []
    with open(data_file, "rb") as f:
        try:
            while True:
                real = struct.unpack('f', f.read(4))[0]
                imag = struct.unpack('f', f.read(4))[0]
                read_data.append(real + (imag * 1j))
        except struct.error:
            # struct.error marks the short read at EOF; the original
            # bare `except:` would have hidden real failures too
            pass
    self.assertComplexTuplesAlmostEqual(read_data, data)
    # check that the metadata matches up
    with open(json_file, "r") as f:
        meta_str = f.read()
        meta = json.loads(meta_str)
    # Check global meta
    assert meta["global"]["core:datatype"] == "cf32_le"
    assert meta["global"]["core:description"] == description
    assert meta["global"]["core:author"] == author
    assert meta["global"]["core:license"] == file_license
    assert meta["global"]["core:hw"] == hardware
    # Check captures meta
    assert meta["captures"][0]["core:sample_start"] == 0
def test_relative_time_mode(self):
    """Relative time mode: the datetime gap between two capture segments
    must equal the relative difference between their rx_time tags.

    Scenario:
      * sample 0 carries rx_time (2, 0.5); 10,000 samples follow
      * sample 10,000 carries rx_time (4, 0.8); 20,000 samples follow
      * relative delta is exactly 2.3 seconds, so the second segment's
        core:datetime must be 2.3 s after the first's
    """
    limit_event = Event()
    continue_event = Event()
    samp_rate = 10000.0
    limit_samples = samp_rate
    print(limit_samples)
    producer = sample_producer(limit_samples, limit_event, continue_event)
    data_file, json_file = self.temp_file_names()
    sink_block = sigmf.sink("cf32_le", data_file,
                            sigmf.sigmf_time_mode_relative)
    sink_block.set_global_meta("core:sample_rate", samp_rate)
    tagger = simple_tag_injector()
    # Tag the very first sample with an rx_time
    tagger.inject_tag = {"rx_time": (2, 0.500000)}
    top = gr.top_block()
    top.connect(producer, tagger)
    top.connect(tagger, sink_block)
    top.start()
    print("waiting")
    limit_event.wait()
    # Give the last samples time to reach the sink block
    sleep(.1)
    # Tag the start of the second section
    tagger.inject_tag = {"rx_time": (4, 0.80000)}
    continue_event.set()
    sleep(.1)
    top.stop()
    top.wait()
    with open(json_file, "r") as handle:
        meta = json.load(handle)
    first_dt = parse_iso_ts(meta["captures"][0]["core:datetime"])
    second_dt = parse_iso_ts(meta["captures"][1]["core:datetime"])
    delta = second_dt - first_dt
    assert delta.seconds == 2
    assert delta.microseconds == 300000
def test_not_intially_open_annotation_tag_offsets(self):
    '''Test that if a sink is created without a file initially open, and
    then a file is opened, that annotation stream tags will have the
    correct offsets, i.e. they should be set from when the file was
    opened, not when the flowgraph started'''
    samp_rate = 200000
    src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    description = "This is a test of the sigmf sink."
    author = "Just some person"
    file_license = "CC-0"
    hardware = "Sig Source"
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", "")
    file_sink.set_global_meta("core:sample_rate", samp_rate)
    file_sink.set_global_meta("core:description", description)
    file_sink.set_global_meta("core:author", author)
    # NOTE: removed a stray duplicate set_global_meta call that
    # overwrote core:sample_rate with the author string
    file_sink.set_global_meta("core:license", file_license)
    file_sink.set_global_meta("core:hw", hardware)
    injector = simple_tag_injector()
    # build flowgraph here
    tb = gr.top_block()
    tb.connect(src, injector)
    tb.connect(injector, file_sink)
    tb.start()
    time.sleep(.1)
    file_sink.open(data_file)
    time.sleep(.1)
    injector.inject_tag = {"test:a": 1}
    time.sleep(.1)
    tb.stop()
    tb.wait()
    injected_offset = injector.injected_offset
    with open(json_file, "r") as f:
        meta_str = f.read()
        meta = json.loads(meta_str)
    # Check global meta
    assert meta["global"]["core:description"] == description
    assert meta["global"]["core:author"] == author
    assert meta["global"]["core:license"] == file_license
    assert meta["global"]["core:hw"] == hardware
    # Check annotations meta
    # The sample_start should be less than what it was injected
    # at, since no file was open at first, so the internal offsets
    # were off
    assert (meta["annotations"][0]["core:sample_start"] < injected_offset)
def test_bad_types_set_global(self):
    '''Make sure that set_global_meta with a non allowed type throws an
    error'''
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", data_file)
    # assertRaises replaces the original try/bare-except flag pattern,
    # which would also have swallowed KeyboardInterrupt/SystemExit
    with self.assertRaises(Exception):
        file_sink.set_global_meta("core:sample_rate", [234, 2342, 234])
def run_flowgraph(filename):
    """Record one second of a 1 kHz cosine into a SigMF file at *filename*."""
    samp_rate = 32000
    limiter = blocks.head(gr.sizeof_float * 1, samp_rate)
    cosine = analog.sig_source_f(samp_rate, analog.GR_COS_WAVE, 1000, 1, 0)
    recorder = sigmf.sink("rf32_le", filename)
    top = gr.top_block()
    top.connect(cosine, limiter)
    top.connect(limiter, recorder)
    top.run()
    top.wait()
def test_stream_tags_before_file(self):
    '''Test that stream tags received before a file is opened will get
    correctly set as metadata'''
    samp_rate = 200000
    src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    description = "This is a test of the sigmf sink."
    author = "Just some person"
    file_license = "CC-0"
    hardware = "Sig Source"
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", "")
    file_sink.set_global_meta("core:sample_rate", samp_rate)
    file_sink.set_global_meta("core:description", description)
    file_sink.set_global_meta("core:author", author)
    # NOTE: removed a stray duplicate set_global_meta call that
    # overwrote core:sample_rate with the author string
    file_sink.set_global_meta("core:license", file_license)
    file_sink.set_global_meta("core:hw", hardware)
    injector = simple_tag_injector()
    # build flowgraph here
    tb = gr.top_block()
    tb.connect(src, injector)
    tb.connect(injector, file_sink)
    tb.start()
    time.sleep(.1)
    injector.inject_tag = {"test:a": 1}
    time.sleep(.1)
    injector.inject_tag = {"rx_freq": 900e6}
    time.sleep(.2)
    file_sink.open(data_file)
    time.sleep(.5)
    tb.stop()
    tb.wait()
    with open(json_file, "r") as f:
        meta_str = f.read()
        meta = json.loads(meta_str)
    # Check global meta
    assert meta["global"]["core:description"] == description
    assert meta["global"]["core:author"] == author
    assert meta["global"]["core:license"] == file_license
    assert meta["global"]["core:hw"] == hardware
    print(meta)
    # Check captures meta
    assert meta["captures"][0]["core:frequency"] == 900e6
    # Check annotations meta, should be empty, since annotations are
    # meant for specific samples and shouldn't be saved
    assert len(meta["annotations"]) == 0
def run_check(dtype):
    # Closure helper: writes N samples with the given sink dtype and
    # verifies the recorded core:datatype. `self` and `ending` are free
    # variables from the enclosing test scope.
    N = 1000
    samp_rate = 200000
    data_file, json_file = self.temp_file_names()
    samples = sig_source_c(samp_rate, 1000, 1, N)
    vec_src = blocks.vector_source_c(samples)
    sink_block = sigmf.sink(dtype, data_file)
    top = gr.top_block()
    top.connect(vec_src, sink_block)
    top.run()
    top.wait()
    with open(json_file, "r") as handle:
        meta = json.load(handle)
    assert meta["global"]["core:datatype"] == ("cf32" + ending)
def test_pmt_to_annotation(self):
    """Annotations created via command messages should be merged with
    tag-derived ones and sorted by sample_start."""
    src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", data_file)
    injector = simple_tag_injector()
    sender = msg_sender()
    counter = sample_counter()
    tb = gr.top_block()
    tb.connect(src, injector)
    tb.connect(injector, counter)
    tb.connect(counter, file_sink)
    tb.msg_connect(sender, "out", file_sink, "command")
    tb.start()
    # sleep so the streamed annotation isn't the first one
    sleep(.1)
    # Inject one tag
    injector.inject_tag = {"test:a": 1}
    # Wait again so that we know the tag got processed
    sleep(.1)
    # Then tell it to add 2 more via pmts,
    # one before the injected tag
    sender.send_msg({
        "command": "set_annotation_meta",
        "sample_start": 1,
        "sample_count": 10,
        "key": "test:b",
        "val": 22
    })
    # and one after
    sender.send_msg({
        "command": "set_annotation_meta",
        "sample_start": counter.count + 1,
        "sample_count": 10,
        "key": "test:c",
        "val": True
    })
    sleep(.25)
    tb.stop()
    tb.wait()
    # Context manager so the metadata file handle is closed promptly
    # (the original `json.load(open(...))` leaked the handle)
    with open(json_file, "r") as f:
        metadata = json.load(f)
    # should be 3 annotations
    self.assertEqual(len(metadata["annotations"]), 3)
    # And they should be these and in this order
    self.assertEqual(metadata["annotations"][0]["test:b"], 22)
    self.assertEqual(metadata["annotations"][1]["test:a"], 1)
    self.assertEqual(metadata["annotations"][2]["test:c"], True)
def test_msg_annotation_meta_merging(self):
    """Annotation messages with identical start/count should merge into
    one annotation; a differing count should create a separate one."""
    src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", data_file)
    sender = msg_sender()
    tb = gr.top_block()
    tb.connect(src, file_sink)
    tb.msg_connect(sender, "out", file_sink, "command")
    tb.start()
    sender.send_msg({
        "command": "set_annotation_meta",
        "sample_start": 1,
        "sample_count": 10,
        "key": "test:a",
        "val": 1
    })
    sender.send_msg({
        "command": "set_annotation_meta",
        "sample_start": 1,
        "sample_count": 10,
        "key": "test:b",
        "val": 2
    })
    sender.send_msg({
        "command": "set_annotation_meta",
        "sample_start": 1,
        "sample_count": 100,
        "key": "test:c",
        "val": 3
    })
    sleep(.25)
    tb.stop()
    tb.wait()
    # Context manager so the metadata file handle is closed promptly
    # (the original `json.load(open(...))` leaked the handle)
    with open(json_file, "r") as f:
        metadata = json.load(f)
    # should be 2 annotations
    self.assertEqual(len(metadata["annotations"]), 2)
    # First should have both test:a and test:b
    self.assertEqual(metadata["annotations"][0]["core:sample_count"], 10)
    self.assertEqual(metadata["annotations"][0]["test:a"], 1)
    self.assertEqual(metadata["annotations"][0]["test:b"], 2)
    # Second should just have c
    self.assertEqual(metadata["annotations"][1]["core:sample_count"], 100)
    self.assertEqual(metadata["annotations"][1]["test:c"], 3)
def test_roundtrip_offset_initial_capture(self):
    """Roundtrip a recording whose first capture segment starts at a
    nonzero offset: the sink output must drop the offset samples while
    keeping the capture metadata."""
    # generate a file
    data, meta_json, filename, meta_file = self.make_file("offset")
    # Rewrite the metadata so the first capture starts 4 samples in
    adjust_size = 4
    with open(meta_file, "r+") as f:
        fdata = json.load(f)
        fdata['captures'][0]["core:sample_start"] = adjust_size
        fdata['captures'][0]["core:frequency"] = 2.4e9
        f.seek(0)
        json.dump(fdata, f, indent=4)
        f.truncate()
    data_start_size = os.path.getsize(filename)
    out_data_file, out_json_file = self.temp_file_names()
    file_source = sigmf.source(filename, "cf32_le")
    file_sink = sigmf.sink("cf32_le", out_data_file)
    tagd = blocks.tag_debug(gr.sizeof_gr_complex, "test")
    top = gr.top_block()
    top.connect(file_source, file_sink)
    top.connect(file_source, tagd)
    top.start()
    top.wait()
    data_end_size = os.path.getsize(out_data_file)
    # Output should shrink by 4 complex samples (2 floats * 4 bytes)
    dropped_samples = adjust_size * 2 * 4
    self.assertEqual(data_start_size - dropped_samples,
                     data_end_size, "Wrong data size")
    with open(out_json_file, "r") as f:
        meta = json.load(f)
    print(meta)
    self.assertEqual(len(meta["annotations"]), 0,
                     "Shouldn't be any annotations in file")
    self.assertEqual(len(meta["captures"]), 1,
                     "Should only be 1 capture segment in file")
    self.assertEqual(meta["captures"][0]["core:frequency"], 2.4e9,
                     "frequency tag is missing")
def test_cf32_to_ci8(self):
    """Reading a cf32 recording back as ci8 and re-sinking it should
    preserve the samples to within integer rounding."""
    path = self.make_file("test_source", "cf32", 10)
    # actual
    out_name = os.path.join(self.test_dir, "test_sink")
    converting_source = sigmf.source(path, "ci8", False)
    byte_sink = sigmf.sink("ci8", out_name)
    top = gr.top_block()
    top.connect(converting_source, byte_sink)
    top.run()
    top.wait()
    expected = self.read_complex_data(path, 'f', 4)
    actual = self.read_complex_data(out_name, 'b', 1)
    np.testing.assert_almost_equal(expected, actual, decimal=0)
def test_initally_empty_file_write(self):
    '''Test that if the file is initially empty and then open is called,
    everything works as expected'''
    samp_rate = 200000
    src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    description = "This is a test of the sigmf sink."
    author = "Just some person"
    file_license = "CC-0"
    hardware = "Sig Source"
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", "")
    file_sink.set_global_meta("core:sample_rate", samp_rate)
    file_sink.set_global_meta("core:description", description)
    file_sink.set_global_meta("core:author", author)
    # NOTE: removed a stray duplicate set_global_meta call that
    # overwrote core:sample_rate with the author string
    file_sink.set_global_meta("core:license", file_license)
    file_sink.set_global_meta("core:hw", hardware)
    # build flowgraph here
    tb = gr.top_block()
    tb.connect(src, file_sink)
    tb.start()
    time.sleep(.5)
    file_sink.open(data_file)
    time.sleep(.5)
    tb.stop()
    tb.wait()
    # check that the metadata matches up
    with open(json_file, "r") as f:
        meta_str = f.read()
        meta = json.loads(meta_str)
    # Check global meta
    assert meta["global"]["core:description"] == description
    assert meta["global"]["core:author"] == author
    assert meta["global"]["core:license"] == file_license
    assert meta["global"]["core:hw"] == hardware
    # Check captures meta
    assert meta["captures"][0]["core:sample_start"] == 0
    # check that data was recorded
    data_size = os.path.getsize(data_file)
    assert data_size > 0
def make_file(self, filename, N=1000, type="cf32_le"):
    """Record *N* samples into *filename* (relative names land in
    self.test_dir) and return
    (sample_data, parsed_metadata, data_path, meta_path)."""
    if not filename.startswith("/"):
        filename = os.path.join(self.test_dir, filename)
    samp_rate = 200000
    samples = sig_source_c(samp_rate, 1000, 1, N)
    vec_src = blocks.vector_source_c(samples)
    sink_block = sigmf.sink(type, filename)
    data_path = sink_block.get_data_path()
    meta_path = sink_block.get_meta_path()
    top = gr.top_block()
    top.connect(vec_src, sink_block)
    top.run()
    with open(meta_path, "r") as handle:
        meta_json = json.load(handle)
    return samples, meta_json, data_path, meta_path
def run_iteration(wait_full, wait_frac):
    # Closure helper (`self` from the enclosing scope): stream
    # (wait_full + wait_frac) seconds worth of samples before opening a
    # file, then check the first capture's datetime equals the injected
    # rx_time advanced by exactly that gap.
    limit_event = Event()
    continue_event = Event()
    samp_rate = 10000.0
    limit_samples = (samp_rate * wait_full) + (samp_rate * wait_frac)
    print(limit_samples)
    producer = sample_producer(limit_samples, limit_event, continue_event)
    data_file, json_file = self.temp_file_names()
    sink_block = sigmf.sink("cf32_le", "")
    sink_block.set_global_meta("core:sample_rate", samp_rate)
    seconds = 1520551983
    frac_seconds = 0.09375
    end_seconds = seconds + wait_full
    end_frac = frac_seconds + wait_frac
    # Carry fractional overflow into the whole-seconds part
    if end_frac > 1:
        end_seconds += 1
        end_frac -= 1
    correct_str = datetime.utcfromtimestamp(end_seconds).strftime(
        '%Y-%m-%dT%H:%M:%S')
    correct_str += str(end_frac).lstrip('0') + "Z"
    tagger = simple_tag_injector()
    # first sample should have a rx_time tag
    tagger.inject_tag = {"rx_time": (seconds, frac_seconds)}
    top = gr.top_block()
    top.connect(producer, tagger)
    top.connect(tagger, sink_block)
    top.start()
    print("waiting")
    limit_event.wait()
    # sleep to let the last samples get to the sink block
    sleep(.1)
    sink_block.open(data_file)
    continue_event.set()
    sleep(.1)
    top.stop()
    top.wait()
    with open(json_file, "r") as handle:
        meta = json.load(handle)
    print(meta)
    assert meta["captures"][0]["core:datetime"] == correct_str
def test_relative_time_mode_initial_closed(self):
    '''Relative time mode when the sink starts out not recording: the
    first capture's datetime should be about "now", not derived from the
    stale rx_time tag seen while the sink was closed.'''
    samp_rate = 100e6
    limit_samples = samp_rate
    print(limit_samples)
    const_src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    data_file, json_file = self.temp_file_names()
    sink_block = sigmf.sink("cf32_le", "",
                            sigmf.sigmf_time_mode_relative)
    sink_block.set_global_meta("core:sample_rate", samp_rate)
    tagger = simple_tag_injector()
    # first sample should have a rx_time tag
    tagger.inject_tag = {"rx_time": (65000, 0.500000)}
    top = gr.top_block()
    top.connect(const_src, tagger)
    top.connect(tagger, sink_block)
    top.start()
    # sleep to let some samples get to the sink block
    sleep(.1)
    # Now begin recording
    sink_block.open(data_file)
    # Let some stuff get recorded
    sleep(.1)
    top.stop()
    top.wait()
    with open(json_file, "r") as handle:
        meta = json.load(handle)
    capture_one_dt = parse_iso_ts(meta["captures"][0]["core:datetime"])
    now = datetime.utcnow()
    print(capture_one_dt)
    print(now)
    self.assertEqual(now.year, capture_one_dt.year,
                     "Bad year in first capture segment")
    self.assertEqual(now.month, capture_one_dt.month,
                     "Bad month in first capture segment")
    self.assertEqual(now.day, capture_one_dt.day,
                     "Bad day in first capture segment")
def make_file(self, filename, type, amplitude):
    """Record one second of a 1 kHz cosine of the given SigMF *type* and
    *amplitude*; returns the absolute output filename."""
    filename = os.path.join(self.test_dir, filename)
    top = gr.top_block()
    if type == "rf32":
        limiter = blocks.head(gr.sizeof_float * 1, self.samp_rate)
        source = analog.sig_source_f(self.samp_rate, analog.GR_COS_WAVE,
                                     1000, amplitude, 0)
        top.connect(source, limiter)
    elif type == "ri32":
        limiter = blocks.head(gr.sizeof_int * 1, self.samp_rate)
        source = analog.sig_source_i(self.samp_rate, analog.GR_COS_WAVE,
                                     1000, amplitude, 0)
        top.connect(source, limiter)
    elif type == "ri16":
        limiter = blocks.head(gr.sizeof_short * 1, self.samp_rate)
        source = analog.sig_source_s(self.samp_rate, analog.GR_COS_WAVE,
                                     1000, amplitude, 0)
        top.connect(source, limiter)
    elif type == "ri8":
        # No native char signal source: generate floats and convert
        limiter = blocks.head(gr.sizeof_char * 1, self.samp_rate)
        source = analog.sig_source_f(self.samp_rate, analog.GR_COS_WAVE,
                                     1000, amplitude, 0)
        convert = blocks.float_to_char(1, 1)
        top.connect(source, convert)
        top.connect(convert, limiter)
    else:
        # Default to complex float samples
        limiter = blocks.head(gr.sizeof_gr_complex * 1, self.samp_rate)
        source = analog.sig_source_c(self.samp_rate, analog.GR_COS_WAVE,
                                     1000, amplitude, 0)
        top.connect(source, limiter)
    recorder = sigmf.sink(type, filename)
    top.connect(limiter, recorder)
    top.run()
    top.wait()
    return filename
def test_write_methods(self):
    """Exercise open()/close() on a running sink: metadata must land in
    whichever file was open at the time, and samples keep flowing (but
    are dropped) after close."""
    src = analog.sig_source_c(0, analog.GR_CONST_WAVE, 0, 0, (1 + 1j))
    data_file_1, json_file_1 = self.temp_file_names()
    data_file_2, json_file_2 = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", data_file_1)
    counter = sample_counter()
    tb = gr.top_block()
    tb.connect(src, counter)
    tb.connect(counter, file_sink)
    tb.start()
    file_sink.set_global_meta("test:a", 1)
    sleep(.2)
    file_sink.open(data_file_2)
    sleep(.2)
    file_sink.set_global_meta("test:b", 2)
    sleep(.2)
    file_sink.close()
    sleep(.2)
    count_1 = counter.count
    sleep(.1)
    count_2 = counter.count
    tb.stop()
    tb.wait()
    # flow graph should still be running after file
    # close, but dropping all packets on the floor
    self.assertGreater(count_2, count_1)
    # The metadata of the two files should be different; context
    # managers close the handles the original `json.load(open(...))`
    # calls leaked
    with open(json_file_1, "r") as f:
        meta_1 = json.load(f)
    with open(json_file_2, "r") as f:
        meta_2 = json.load(f)
    self.assertEqual(meta_1["global"]["test:a"], 1)
    self.assertTrue("test:b" not in meta_1["global"])
    self.assertEqual(meta_2["global"]["test:b"], 2)
    self.assertTrue("test:a" not in meta_2["global"])
def test_ensure_empty_string_global_meta_setting(self):
    '''Ensure empty strings get propagated'''
    N = 1000
    samp_rate = 200000
    data = sig_source_c(samp_rate, 1000, 1, N)
    src = blocks.vector_source_c(data)
    description = ""
    author = ""
    file_license = ""
    hardware = ""
    data_file, json_file = self.temp_file_names()
    file_sink = sigmf.sink("cf32_le", data_file)
    file_sink.set_global_meta("core:sample_rate", samp_rate)
    file_sink.set_global_meta("core:description", description)
    file_sink.set_global_meta("core:author", author)
    # NOTE: removed a stray duplicate set_global_meta call that
    # overwrote core:sample_rate with the (empty) author string
    file_sink.set_global_meta("core:license", file_license)
    file_sink.set_global_meta("core:hw", hardware)
    # build flowgraph here
    tb = gr.top_block()
    tb.connect(src, file_sink)
    tb.run()
    tb.wait()
    # check that the metadata matches up
    with open(json_file, "r") as f:
        meta_str = f.read()
        meta = json.loads(meta_str)
    # Check global meta
    assert meta["global"]["core:description"] == ""
    assert meta["global"]["core:author"] == ""
    assert meta["global"]["core:license"] == ""
    assert meta["global"]["core:hw"] == ""