def test_compact_processor(self):
    """Receive a simulated stream using the compact message processor and
    verify the resulting HDF5 layout: channel datasets grouped under "data",
    plus a top-level "pulse_id" dataset, each with one entry per message."""
    n_messages = 5
    generate_thread = Thread(
        target=simulate.generate_stream,
        args=(9999, n_messages),
        daemon=True,  # setDaemon() is deprecated since Python 3.10
    )
    generate_thread.start()

    source = "tcp://localhost:9999"
    h5.receive(source, self.h5_test_filename, n_messages=n_messages,
               message_processor=process_message_compact)
    generate_thread.join()

    # Open read-only and close deterministically so the file is not left
    # open with a dangling handle for subsequent tests.
    with h5py.File(self.h5_test_filename, "r") as file:
        expected_channels = set(x["name"] for x in simulated_channels)

        self.assertSetEqual(set(file["data"].keys()), expected_channels)

        for channel_name in expected_channels:
            self.assertTrue(channel_name in file["data"])
            self.assertEqual(len(file["data"][channel_name]), n_messages)

        self.assertTrue("pulse_id" in file.keys())
        self.assertEqual(len(file["pulse_id"]), n_messages)
def h5(
    self,
    fina=None,
    channel_list=None,
    N_pulses=None,
    default_path=True,
    queue_size=100,
    compact_format=False,
):
    """Record N_pulses messages of the given channels into an HDF5 file.

    Args:
        fina: Output file path. If it already exists the user is asked
            interactively whether to delete it; if declined, nothing is done.
        channel_list: Channels to record; falls back to every channel in
            self._default_channel_list when empty or None.
        N_pulses: Number of pulses (messages) to record. Required.
        default_path: Unused here; kept for interface compatibility.
        queue_size: Receive queue size passed through to receive().
        compact_format: When True, store data with process_message_compact.

    Raises:
        TypeError: If N_pulses is None.
    """
    if N_pulses is None:
        # Fail fast with a clear message instead of the opaque TypeError
        # the former no-op "N_pulses *= 1" used to raise on None.
        raise TypeError("N_pulses must be an integer number of pulses, not None")

    if os.path.isfile(fina):
        print("!!! File %s already exists, would you like to delete it?" % fina)
        if input("(y/n)") == "y":
            print("Deleting %s ." % fina)
            os.remove(fina)
        else:
            return

    # Create all missing parent directories; exist_ok avoids a race if
    # another process created the directory in the meantime.
    Path(fina).parent.mkdir(parents=True, exist_ok=True)

    if not channel_list:
        print("No channels specified, using all lists instead.")
        channel_list = []
        for tlist in self._default_channel_list.values():
            channel_list.extend(tlist)
        print(channel_list)

    message_processor = process_message_compact if compact_format else None

    source = dispatcher.request_stream(channel_list)
    mode = zmq.SUB
    try:
        print(f"message proc is {message_processor}")
        receive(
            source,
            fina,
            queue_size=queue_size,
            mode=mode,
            n_messages=N_pulses,
            message_processor=message_processor,
        )
    except KeyboardInterrupt:
        # KeyboardInterrupt is thrown if the receiving is terminated via
        # ctrl+c; we don't want a stacktrace then, so swallow it.
        pass
    finally:
        print("Closing stream")
        dispatcher.remove_stream(source)
def bsread_to_h5(filename, channels, n_pulses=100, queue_size=100, mode=zmq.SUB):
    """Request a dispatcher stream for *channels* and dump *n_pulses*
    messages into the HDF5 file *filename*.

    Args:
        filename: Target HDF5 file path.
        channels: Channel names to request from the dispatcher.
        n_pulses: Number of messages to receive (default 100).
        queue_size: Receive queue size passed through to receive().
        mode: ZMQ socket mode (default zmq.SUB).
    """
    source = dispatcher.request_stream(channels)
    try:
        receive(source, filename, queue_size=queue_size, mode=mode,
                n_messages=n_pulses)
    finally:
        # Always release the dispatcher stream, matching the cleanup
        # done in the h5() method — otherwise the stream leaks on error.
        dispatcher.remove_stream(source)
def test_receive(self):
    """Receive a simulated stream with the default (non-compact) processor
    and verify the per-channel HDF5 layout: each channel gets "data",
    "timestamp", "timestamp_offset" and "pulse_id" datasets, plus a
    top-level "pulse_id" dataset, each with one entry per message."""
    n_messages = 5
    generate_thread = Thread(
        target=simulate.generate_stream,
        args=(9999, n_messages),
        daemon=True,  # setDaemon() is deprecated since Python 3.10
    )
    generate_thread.start()

    source = "tcp://localhost:9999"
    h5.receive(source, self.h5_test_filename, n_messages=n_messages)
    generate_thread.join()

    # Open read-only and close deterministically so the file is not left
    # open with a dangling handle for subsequent tests.
    with h5py.File(self.h5_test_filename, "r") as file:
        expected_channels = set(x["name"] for x in simulated_channels)
        # Pulse id is added to the h5 file.
        expected_channels.add("pulse_id")

        self.assertSetEqual(set(file.keys()), expected_channels)

        # Pulse_id is a dataset, inspect it separately.
        expected_channels.remove("pulse_id")

        for channel_name in expected_channels:
            self.assertSetEqual(
                set(file[channel_name].keys()),
                {"data", "timestamp", "timestamp_offset", "pulse_id"})

            self.assertEqual(len(file[channel_name]["data"]), n_messages)
            self.assertEqual(len(file[channel_name]["timestamp"]), n_messages)
            self.assertEqual(len(file[channel_name]["timestamp_offset"]), n_messages)
            self.assertEqual(len(file[channel_name]["pulse_id"]), n_messages)

        self.assertEqual(len(file["pulse_id"]), n_messages)
def test_save_camera_stream_to_h5(self):
    """Create a pipeline instance on the camera server, dump n_messages
    images from its stream into an HDF5 file, and verify the file exists.

    NOTE(review): requires a running camera server at server_address —
    adjust the address to match your deployment.
    """
    from bsread import h5, SUB

    camera_name = "simulation"
    file_name = "testing_dump.h5"
    n_messages = 10

    # Change to match your camera server
    server_address = "http://0.0.0.0:8889"

    client = PipelineClient(server_address)
    instance_id, stream_address = client.create_instance_from_config(
        {"camera_name": camera_name})

    # Remove the dump file after the test (cleanups run after the
    # assertions below), so reruns start from a clean state.
    self.addCleanup(lambda: os.path.exists(file_name) and os.remove(file_name))

    try:
        # The output file has n_messages images from the camera stream.
        h5.receive(source=stream_address, file_name=file_name,
                   mode=SUB, n_messages=n_messages)
    finally:
        # Stop the pipeline instance we created so it does not keep
        # running (and streaming) on the server after the test.
        client.stop_instance(instance_id)

    self.assertTrue(os.path.exists(file_name))