class StreamRequestSender(object):
    def __init__(self, output_port, queue_length, send_timeout, mode, epics_writer_url):
        self.output_port = output_port
        self.queue_length = queue_length
        self.send_timeout = send_timeout
        self.mode = mode
        self.epics_writer_url = epics_writer_url

        _logger.info("Starting stream request sender with output_port=%s, queue_length=%s, send_timeout=%s, mode=%s "
                     "and epics_writer_url=%s" %
                     (self.output_port, self.queue_length, self.send_timeout, self.mode, self.epics_writer_url))

        self.output_stream = Sender(port=self.output_port,
                                    queue_size=self.queue_length,
                                    send_timeout=self.send_timeout,
                                    mode=self.mode)

        self.output_stream.open()

    def send(self, write_request, sendto_epics_writer=True):
        _logger.info("Sending write request: %s" % write_request)
        self.output_stream.send(data=write_request)

        if self.epics_writer_url and sendto_epics_writer:
            def send_epics_request():
                try:
                    epics_writer_request = {
                        "range": json.loads(write_request["data_api_request"])["range"],
                        "parameters": json.loads(write_request["parameters"])
                    }

                    _logger.info("Sending epics writer request %s" % epics_writer_request)
                    requests.put(url=self.epics_writer_url, json=epics_writer_request)

                except Exception:
                    _logger.exception("Error while trying to forward the write request to the epics writer.")

            # Forward the request in a separate thread so a slow epics writer does not block the output stream.
            Thread(target=send_epics_request).start()
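# Minimal usage sketch for StreamRequestSender, assuming PUSH is the bsread sender mode constant
# already imported by the surrounding module (as in the pipeline code below). The port, queue,
# timeout and request payload values are illustrative, not taken from the source.
def example_stream_request_sender_usage():
    stream_request_sender = StreamRequestSender(output_port=10000,
                                                queue_length=100,
                                                send_timeout=1000,
                                                mode=PUSH,
                                                epics_writer_url=None)

    # send() expects the serialized data API request and writer parameters under these keys;
    # the JSON contents shown here are hypothetical.
    stream_request_sender.send({"data_api_request": '{"range": {"startPulseId": 0, "endPulseId": 100}}',
                                "parameters": '{"output_file": "/tmp/test.h5"}'})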
def create_sender(self, stop_event, port):
    self.stop_event = stop_event

    if self.camera_config.get_configuration().get("protocol", "tcp") == "ipc":
        sender = IpcSender(address=get_ipc_address(self.get_name()),
                           mode=PUB,
                           data_header_compression=config.CAMERA_BSREAD_DATA_HEADER_COMPRESSION)
    else:
        sender = Sender(port=port,
                        mode=PUB,
                        data_header_compression=config.CAMERA_BSREAD_DATA_HEADER_COMPRESSION)

    sender.open(no_client_action=self.no_client_timeout,
                no_client_timeout=self.get_client_timeout())

    return sender
def create_sender(output_stream_port, stop_event):
    global sender, pid_buffer, pid_buffer_size
    sender = None
    pars = get_parameters()

    def no_client_action():
        global sender
        nonlocal pars
        if pars["no_client_timeout"] > 0:
            _logger.warning("No client connected to the pipeline stream for %d seconds. Closing instance. %s",
                            pars["no_client_timeout"], log_tag)
            stop_event.set()
            if sender:
                if pars["mode"] == "PUSH" and pars["block"]:
                    _logger.warning("Killing the process: cannot stop gracefully if sender is blocking")
                    os._exit(0)

    if pars["mode"] == "FILE":
        file_name = pars["file"]
        records = pars.get("records")
        sender = WriterSender(output_file=file_name,
                              number_of_records=records if records else UNDEFINED_NUMBER_OF_RECORDS,
                              layout=pars["layout"],
                              save_local_timestamps=pars["localtime"],
                              change=pars["change"],
                              attributes={})
    else:
        output_stream = pars.get("output_stream", None)
        address = "tcp://*"
        connect_type = BIND
        if output_stream:
            connect_type = CONNECT
            address, output_stream_port = get_host_port_from_stream_address(output_stream)
            address = "tcp://" + address
        sender = Sender(port=output_stream_port,
                        address=address,
                        conn_type=connect_type,
                        mode=PUSH if (pars["mode"] == "PUSH") else PUB,
                        queue_size=pars["queue_size"],
                        block=pars["block"],
                        data_header_compression=config.CAMERA_BSREAD_DATA_HEADER_COMPRESSION)

    sender.open(no_client_action=no_client_action,
                no_client_timeout=pars["no_client_timeout"] if pars["no_client_timeout"] > 0 else sys.maxsize)
    init_sender(sender, pars)

    if pars.get("pid_buffer", 0) > 1:
        pid_buffer_size = pars.get("pid_buffer")
        pid_buffer = {}

    return sender
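# Hypothetical parameter dict for create_sender(), built only from the keys the function
# reads above; the values are illustrative. FILE mode would additionally need "file",
# "records", "layout", "localtime" and "change".
example_pars = {
    "mode": "PUB",            # "PUSH", "PUB" or "FILE"
    "block": False,           # blocking send (relevant for PUSH mode)
    "queue_size": 10,
    "no_client_timeout": 10,  # seconds; a value <= 0 disables the timeout
    "output_stream": None,    # e.g. "tcp://host:9999" to CONNECT instead of BIND
    "pid_buffer": 0,          # values > 1 enable the pulse-id buffer
}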
def generate_real_stream(port, n_messages=None, interval=0.01):
    from bsread.sender import Sender

    generator = Sender(port=port)

    for channel in simulated_channels:
        generator.add_channel(**channel)

    generator.generate_stream(n_messages=n_messages, interval=interval)
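# Minimal consumer sketch for the simulated stream above, reusing the Source/SUB pattern that the
# pipeline functions below rely on. It assumes Source and SUB are importable from the bsread
# package; host, port and the receive timeout are illustrative.
def print_simulated_stream(host="localhost", port=9999, n_messages=10):
    from bsread import Source, SUB

    stream = Source(host=host, port=port, mode=SUB, receive_timeout=1000)
    stream.connect()
    try:
        for _ in range(n_messages):
            message = stream.receive()
            # receive() returns None on timeout.
            if message is None:
                continue
            print(message.data.pulse_id, list(message.data.data.keys()))
    finally:
        stream.disconnect()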
def start_sender():
    # Start a mock sender stream.
    generator = Sender(block=False)
    generator.add_channel('CAMERA1:X', lambda x: x, metadata={'type': 'int32'})
    generator.add_channel('CAMERA1:Y', lambda x: x, metadata={'type': 'int32'})
    generator.add_channel('CAMERA1:VALID', lambda x: 10, metadata={'type': 'int32'})

    generator.open()

    while bs_sending:
        generator.send()
        time.sleep(0.05)

    generator.close()
def run(stop_event, statistics, parameter_queue, cam_client, pipeline_config, output_stream_port,
        background_manager, user_scripts_manager=None):

    def no_client_action():
        nonlocal parameters
        if parameters["no_client_timeout"] > 0:
            _logger.warning("No client connected to the pipeline stream for %d seconds. Closing instance. %s" %
                            (parameters["no_client_timeout"], log_tag))
            stop_event.set()

    source = None
    sender = None

    set_log_tag("store_pipeline")
    exit_code = 0

    parameters = init_pipeline_parameters(pipeline_config, parameter_queue, user_scripts_manager)
    if parameters.get("no_client_timeout") is None:
        parameters["no_client_timeout"] = config.MFLOW_NO_CLIENTS_TIMEOUT
    modulo = parameters.get("modulo", None)

    try:
        init_statistics(statistics)

        camera_name = pipeline_config.get_camera_name()
        stream_image_name = camera_name + config.EPICS_PV_SUFFIX_IMAGE
        set_log_tag(" [" + str(camera_name) + " | " + str(pipeline_config.get_name()) + ":" +
                    str(output_stream_port) + "]")

        source = connect_to_camera(cam_client)

        _logger.debug("Opening output stream on port %d. %s", output_stream_port, log_tag)

        sender = Sender(port=output_stream_port,
                        mode=PUSH,
                        data_header_compression=config.CAMERA_BSREAD_DATA_HEADER_COMPRESSION,
                        block=False)

        sender.open(no_client_action=no_client_action,
                    no_client_timeout=parameters["no_client_timeout"]
                    if parameters["no_client_timeout"] > 0 else sys.maxsize)
        init_sender(sender, parameters)

        # TODO: Register proper channels.

        # Indicate that the startup was successful.
        stop_event.clear()

        _logger.debug("Transceiver started. %s" % log_tag)

        counter = 1

        while not stop_event.is_set():
            try:
                data = source.receive()
                update_statistics(sender, data.statistics.total_bytes_received if data else 0, 1 if data else 0)

                if modulo:
                    if counter < modulo:
                        counter = counter + 1
                        continue
                    else:
                        counter = 1

                # In case of receiving error or timeout, the returned data is None.
                if data is None:
                    continue

                forward_data = {stream_image_name: data.data.data["image"].value}

                pulse_id = data.data.pulse_id
                timestamp = (data.data.global_timestamp, data.data.global_timestamp_offset)

                send(sender, forward_data, timestamp, pulse_id)

            except:
                _logger.exception("Could not process message. %s" % log_tag)
                stop_event.set()

        _logger.info("Stopping transceiver. %s" % log_tag)

    except:
        _logger.exception("Exception while trying to start the receive and process thread. %s" % log_tag)
        exit_code = 1
        raise

    finally:
        _logger.info("Stopping transceiver. %s" % log_tag)
        if source:
            source.disconnect()
        if sender:
            try:
                sender.close()
            except:
                pass
        sys.exit(exit_code)
def createStream():
    s = TestData()
    generator = Sender()
    for par in pars:
        generator.add_channel(par, partial(s.getPar, parameter=par))
    generator.generate_stream()
def image(pulse_id):
    image = []
    for i in range(2):
        # line = []
        # for index in range(0, 30, 1):
        #     grad = (3.1415 * index / float(200)) + pulse_id / float(100)
        #     line.append(math.sin(grad))
        # image.append(line)
        image.append([1.0, 2.0, 3.0, 4.0])
    return image


if __name__ == "__main__":
    generator = Sender()
    generator.add_channel('ABC', lambda x: x, metadata={'type': 'int32'})
    generator.add_channel('ABC_BIG', lambda x: struct.pack('>i', x),
                          metadata={'type': 'int32', 'encoding': 'big'})
    generator.add_channel('ABCD', lambda x: x * 10.0)
    generator.add_channel('ABCDF', lambda x: x * 100.0)
    generator.add_channel('XYZ', lambda x: x * 200.0)
    generator.add_channel('XYZW', lambda x: 'hello', metadata={'type': 'string'})
    generator.add_channel('WWW', lambda x: [1.0, 2.0, 3.0, 4.0],
                          metadata={'type': 'float64', 'shape': [4]})
def main():
    parser = argparse.ArgumentParser()

    device_name = os.getenv("DEVICE_NAME", "test_device")
    parser.add_argument('--device_name', type=str, default=device_name, help='Simulated device name')

    source_file = os.getenv("SOURCE_FILE", "sources.json")
    parser.add_argument('--source_file', type=str, default=source_file, help='Simulation sources file')

    port = os.getenv("PORT", 9999)
    parser.add_argument('--port', type=int, default=port, help='Output stream port')

    args = parser.parse_args()

    device_name = args.device_name
    source_file = args.source_file
    port = args.port

    _logger.info("Starting generator for device %s on port %s." % (device_name, port))

    with open(source_file, 'r') as input_file:
        sources = json.load(input_file)

    if device_name not in sources:
        raise ValueError("device_name=%s not found in sources file." % device_name)

    try:
        channels_metadata = sources[device_name]

        sender = Sender(port=port)

        for channel in channels_metadata:
            _logger.info("Adding channel %s with type %s and shape %s." %
                         (channel["name"], channel["type"], channel["shape"]))

            sender.add_channel(name=channel["name"],
                               function=get_generator_function(channel["type"], channel["shape"]),
                               metadata={"type": channel["type"],
                                         "shape": channel["shape"]})

        sender.generate_stream()

    except KeyboardInterrupt:
        _logger.info('Generator %s interrupted (SIGINT)', device_name)

    except Exception:
        _logger.exception('Generator %s stopped', device_name)
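# Hypothetical sources.json content matching the fields main() reads above
# (device name mapped to a list of channels with "name", "type" and "shape").
# The device and channel names and shapes are illustrative.
import json

example_sources = {
    "test_device": [
        {"name": "test_device:intensity", "type": "float64", "shape": [1]},
        {"name": "test_device:image", "type": "uint16", "shape": [640, 480]},
    ]
}

with open("sources.json", "w") as output_file:
    json.dump(example_sources, output_file, indent=2)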
def start_sender():
    # Start a mock sender stream.
    generator = Sender(block=False)
    generator.add_channel('CAMERA1:X', lambda x: x, metadata={'type': 'int32'})
    generator.add_channel('CAMERA1:Y', lambda x: x, metadata={'type': 'int32'})
    generator.add_channel('CAMERA1:VALID', lambda x: 10, metadata={'type': 'int32'})
    generator.add_channel('BEAM_OK', lambda x: 1 if x % 20 == 0 else 0, metadata={'type': 'int32'})

    generator.open()

    while bs_sending:
        generator.send()
        time.sleep(MOCK_SENDER_INTERVAL)

    generator.close()
def process_bsread_camera(stop_event, statistics, parameter_queue, camera, port):
    """
    Start the camera stream and receive the incoming bsread streams. This function blocks until stop_event is set.
    :param stop_event: Event when to stop the process.
    :param statistics: Statistics namespace.
    :param parameter_queue: Parameters queue to be passed to the pipeline.
    :param camera: Camera instance to get the stream from.
    :param port: Port to use to bind the output stream.
    """
    sender = None
    camera_stream = None

    try:
        # If there is no client for some time, disconnect.
        def no_client_timeout():
            _logger.info("No client connected to the '%s' stream for %d seconds. Closing instance." %
                         (camera.get_name(), config.MFLOW_NO_CLIENTS_TIMEOUT))
            stop_event.set()

        def process_parameters():
            nonlocal x_size, y_size, x_axis, y_axis
            x_axis, y_axis = camera.get_x_y_axis()
            x_size, y_size = camera.get_geometry()

        # TODO: Use to register proper channels. But be aware that the size and dtype can change during the running.
        # def register_image_channel(size_x, size_y, dtype):
        #     sender.add_channel("image", metadata={"compression": config.CAMERA_BSREAD_IMAGE_COMPRESSION,
        #                                           "shape": [size_x, size_y],
        #                                           "type": dtype})

        x_size = y_size = x_axis = y_axis = None

        sender = Sender(port=port,
                        mode=PUB,
                        data_header_compression=config.CAMERA_BSREAD_DATA_HEADER_COMPRESSION)

        sender.open(no_client_action=no_client_timeout,
                    no_client_timeout=config.MFLOW_NO_CLIENTS_TIMEOUT)

        camera_name = camera.get_name()
        camera_stream = camera.get_stream()
        _logger.info("Connecting to camera '%s' over bsread.", camera_name)

        process_parameters()
        # register_image_channel(x_size, y_size, dtype)

        statistics.counter = 0
        camera_stream.connect()

        # This signals that the camera has successfully started.
        stop_event.clear()

        while not stop_event.is_set():
            try:
                data = camera_stream.receive()

                # In case of receiving error or timeout, the returned data is None.
                if data is None:
                    continue

                image = data.data.data[camera_name + config.EPICS_PV_SUFFIX_IMAGE].value

                # Rotate and mirror the image if needed - this is done in the epics:_get_image for epics cameras.
                image = transform_image(image, camera.camera_config)

                # Numpy is slowest dimension first, but bsread is fastest dimension first.
                height, width = image.shape

                pulse_id = data.data.pulse_id
                timestamp_s = data.data.global_timestamp
                timestamp_ns = data.data.global_timestamp_offset
                timestamp = timestamp_s + (timestamp_ns / 1e9)

                data = {"image": image,
                        "height": height,
                        "width": width,
                        "x_axis": x_axis,
                        "y_axis": y_axis,
                        "timestamp": timestamp}

                sender.send(data=data, pulse_id=pulse_id, timestamp=timestamp, check_data=True)

                while not parameter_queue.empty():
                    new_parameters = parameter_queue.get()
                    camera.camera_config.set_configuration(new_parameters)
                    process_parameters()

            except Exception:
                _logger.exception("Could not process message.")
                stop_event.set()

        _logger.info("Stopping transceiver.")

    except Exception:
        _logger.exception("Error while processing camera stream.")

    finally:
        # Wait for termination / update configuration / etc.
        stop_event.wait()

        if camera_stream:
            camera.disconnect()

        if sender:
            sender.close()
def process_epics_camera(stop_event, statistics, parameter_queue, camera, port):
    """
    Start the camera stream and listen for image monitors. This function blocks until stop_event is set.
    :param stop_event: Event when to stop the process.
    :param statistics: Statistics namespace.
    :param parameter_queue: Parameters queue to be passed to the pipeline.
    :param camera: Camera instance to get the images from.
    :param port: Port to use to bind the output stream.
    """
    sender = None

    try:
        # If there is no client for some time, disconnect.
        def no_client_timeout():
            _logger.info("No client connected to the '%s' stream for %d seconds. Closing instance." %
                         (camera.get_name(), config.MFLOW_NO_CLIENTS_TIMEOUT))
            stop_event.set()

        def process_parameters():
            nonlocal x_size, y_size, x_axis, y_axis
            x_size, y_size = camera.get_geometry()
            x_axis, y_axis = camera.get_x_y_axis()

            sender.add_channel("image", metadata={"compression": config.CAMERA_BSREAD_IMAGE_COMPRESSION,
                                                  "shape": [x_size, y_size],
                                                  "type": "uint16"})

        x_size = y_size = x_axis = y_axis = None

        camera.connect()

        sender = Sender(port=port,
                        mode=PUB,
                        data_header_compression=config.CAMERA_BSREAD_DATA_HEADER_COMPRESSION)

        # Register the bsread channels - compress only the image.
        sender.add_channel("width", metadata={"compression": config.CAMERA_BSREAD_SCALAR_COMPRESSION,
                                              "type": "int64"})

        sender.add_channel("height", metadata={"compression": config.CAMERA_BSREAD_SCALAR_COMPRESSION,
                                               "type": "int64"})

        sender.add_channel("timestamp", metadata={"compression": config.CAMERA_BSREAD_SCALAR_COMPRESSION,
                                                  "type": "float64"})

        sender.add_channel("x_axis", metadata={"compression": config.CAMERA_BSREAD_SCALAR_COMPRESSION,
                                               "type": "float32"})

        sender.add_channel("y_axis", metadata={"compression": config.CAMERA_BSREAD_SCALAR_COMPRESSION,
                                               "type": "float32"})

        sender.open(no_client_action=no_client_timeout,
                    no_client_timeout=config.MFLOW_NO_CLIENTS_TIMEOUT)

        process_parameters()

        statistics.counter = 0

        def collect_and_send(image, timestamp):
            nonlocal x_size, y_size, x_axis, y_axis

            # Data to be sent over the stream.
            data = {"image": image,
                    "timestamp": timestamp,
                    "width": x_size,
                    "height": y_size,
                    "x_axis": x_axis,
                    "y_axis": y_axis}

            try:
                sender.send(data=data, timestamp=timestamp, check_data=False)
            except Again:
                _logger.warning("Send timeout. Lost image with timestamp '%s'." % timestamp)

            while not parameter_queue.empty():
                new_parameters = parameter_queue.get()
                camera.camera_config.set_configuration(new_parameters)
                process_parameters()

        camera.add_callback(collect_and_send)

        # This signals that the camera has successfully started.
        stop_event.clear()

    except:
        _logger.exception("Error while processing camera stream.")

    finally:
        # Wait for termination / update configuration / etc.
        stop_event.wait()

        camera.disconnect()

        if sender:
            sender.close()
def store_pipeline(stop_event, statistics, parameter_queue, cam_client, pipeline_config, output_stream_port,
                   background_manager):
    # TODO: Implement statistics: n_clients, input_throughput

    def no_client_timeout():
        _logger.warning("No client connected to the pipeline stream for %d seconds. Closing instance." %
                        config.MFLOW_NO_CLIENTS_TIMEOUT)
        stop_event.set()

    source = None
    sender = None

    try:
        camera_stream_address = cam_client.get_camera_stream(pipeline_config.get_camera_name())
        camera_name = pipeline_config.get_camera_name()

        _logger.debug("Connecting to camera %s on stream address %s.", camera_name, camera_stream_address)

        source_host, source_port = get_host_port_from_stream_address(camera_stream_address)

        source = Source(host=source_host,
                        port=source_port,
                        receive_timeout=config.PIPELINE_RECEIVE_TIMEOUT,
                        mode=SUB)

        source.connect()

        _logger.debug("Opening output stream on port %d.", output_stream_port)

        sender = Sender(port=output_stream_port,
                        mode=PUSH,
                        data_header_compression=config.CAMERA_BSREAD_DATA_HEADER_COMPRESSION,
                        block=False)

        sender.open(no_client_action=no_client_timeout,
                    no_client_timeout=config.MFLOW_NO_CLIENTS_TIMEOUT)
        # TODO: Register proper channels.

        # Indicate that the startup was successful.
        stop_event.clear()

        _logger.debug("Transceiver started.")

        while not stop_event.is_set():
            try:
                data = source.receive()

                # In case of receiving error or timeout, the returned data is None.
                if data is None:
                    continue

                forward_data = {camera_name: data.data.data["image"].value}

                pulse_id = data.data.pulse_id
                timestamp = (data.data.global_timestamp, data.data.global_timestamp_offset)

                sender.send(data=forward_data, pulse_id=pulse_id, timestamp=timestamp)

            except:
                _logger.exception("Could not process message.")
                stop_event.set()

        _logger.info("Stopping transceiver.")

    except:
        _logger.exception("Exception while trying to start the receive and process thread.")
        raise

    finally:
        if source:
            source.disconnect()
        if sender:
            sender.close()
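# Minimal client sketch for the PUSH output stream opened by store_pipeline above. Because the
# sender uses PUSH, a client would connect with the matching pull-style mode; this assumes the
# bsread package exposes a PULL constant alongside SUB, and the host/port values are illustrative.
def drain_store_stream(host="localhost", port=8889, n_messages=10):
    from bsread import Source, PULL

    stream = Source(host=host, port=port, mode=PULL, receive_timeout=1000)
    stream.connect()
    try:
        for _ in range(n_messages):
            message = stream.receive()
            # receive() returns None on timeout.
            if message is None:
                continue
            # The store pipeline forwards a single channel named after the camera.
            print(message.data.pulse_id, list(message.data.data.keys()))
    finally:
        stream.disconnect()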
def processing_pipeline(stop_event, statistics, parameter_queue, cam_client, pipeline_config, output_stream_port,
                        background_manager):
    # TODO: Implement statistics: n_clients, input_throughput

    def no_client_timeout():
        _logger.warning("No client connected to the pipeline stream for %d seconds. Closing instance." %
                        config.MFLOW_NO_CLIENTS_TIMEOUT)
        stop_event.set()

    def process_pipeline_parameters():
        parameters = pipeline_config.get_configuration()
        _logger.debug("Processing pipeline parameters %s.", parameters)

        background_array = None
        if parameters.get("image_background_enable"):
            background_id = pipeline_config.get_background_id()
            _logger.debug("Image background enabled. Using background_id %s.", background_id)
            background_array = background_manager.get_background(background_id)

        size_x, size_y = cam_client.get_camera_geometry(pipeline_config.get_camera_name())

        image_region_of_interest = parameters.get("image_region_of_interest")
        if image_region_of_interest:
            _, size_x, _, size_y = image_region_of_interest

        _logger.debug("Image width %d and height %d.", size_x, size_y)

        return parameters, background_array

    source = None
    sender = None

    try:
        pipeline_parameters, image_background_array = process_pipeline_parameters()

        camera_stream_address = cam_client.get_camera_stream(pipeline_config.get_camera_name())
        _logger.debug("Connecting to camera stream address %s.", camera_stream_address)

        source_host, source_port = get_host_port_from_stream_address(camera_stream_address)

        source = Source(host=source_host,
                        port=source_port,
                        receive_timeout=config.PIPELINE_RECEIVE_TIMEOUT,
                        mode=SUB)

        source.connect()

        _logger.debug("Opening output stream on port %d.", output_stream_port)

        sender = Sender(port=output_stream_port,
                        mode=PUB,
                        data_header_compression=config.CAMERA_BSREAD_DATA_HEADER_COMPRESSION)

        sender.open(no_client_action=no_client_timeout,
                    no_client_timeout=config.MFLOW_NO_CLIENTS_TIMEOUT)
        # TODO: Register proper channels.

        # Indicate that the startup was successful.
        stop_event.clear()

        _logger.debug("Transceiver started.")

        while not stop_event.is_set():
            try:
                while not parameter_queue.empty():
                    new_parameters = parameter_queue.get()
                    pipeline_config.set_configuration(new_parameters)
                    pipeline_parameters, image_background_array = process_pipeline_parameters()

                data = source.receive()

                # In case of receiving error or timeout, the returned data is None.
                if data is None:
                    continue

                image = data.data.data["image"].value
                x_axis = data.data.data["x_axis"].value
                y_axis = data.data.data["y_axis"].value
                processing_timestamp = data.data.data["timestamp"].value

                processed_data = process_image(image, processing_timestamp, x_axis, y_axis,
                                               pipeline_parameters, image_background_array)

                processed_data["width"] = processed_data["image"].shape[1]
                processed_data["height"] = processed_data["image"].shape[0]

                pulse_id = data.data.pulse_id
                timestamp = (data.data.global_timestamp, data.data.global_timestamp_offset)

                sender.send(data=processed_data, timestamp=timestamp, pulse_id=pulse_id)

            except:
                _logger.exception("Could not process message.")
                stop_event.set()

        _logger.info("Stopping transceiver.")

    except:
        _logger.exception("Exception while trying to start the receive and process thread.")
        raise

    finally:
        if source:
            source.disconnect()
        if sender:
            sender.close()