# `Source` and `dispatcher` come from the bsread package; EventGenerator_SFEL is project-local.
class EventHandler_SFEL:
    """Example EventHandler object. The class wraps data-source-specific reader code
    into a standardized object with well-defined methods for initialisation and reading."""

    def __init__(self, source_default_keys=dict(host="localhost", port=9999, config_port=None,
                                                conn_type='connect', mode=None, queue_size=100,
                                                copy=True, config_address=None, all_channels=False,
                                                receive_timeout=None,
                                                dispatcher_url='https://dispatcher-api.psi.ch/sf',
                                                dispatcher_verify_request=True,
                                                dispatcher_disable_compression=False)):
        self.source_default_keys = source_default_keys
        self.source = None
        self.source_ids = []

    def get_all_source_ids(self):
        """Dummy method which should interface to a service providing the available
        data sources (detectors)."""
        return dispatcher.get_current_channels()

    def register_source(self, source_id):
        """Register a source to be read in the loop iterator."""
        if source_id not in self.source_ids:
            self.source_ids.append(source_id)

        kwargs = self.source_default_keys.copy()
        kwargs['channels'] = self.source_ids

        # Reconnect with the updated channel list.
        if self.source:
            self.source.disconnect()
        self.source = Source(**kwargs)

    def remove_source(self, source_id):
        """Remove a source from the loop iterator."""
        self.source_ids.remove(source_id)

        kwargs = self.source_default_keys.copy()
        kwargs['channels'] = self.source_ids

        if self.source:
            self.source.disconnect()
        self.source = Source(**kwargs)

    def create_event_generator(self):
        self.stream = self.source.connect()
        return iter(EventGenerator_SFEL(self.stream))
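# A minimal usage sketch for the handler above, assuming a reachable dispatcher;
# the channel name is a hypothetical placeholder, not taken from the original code.
handler = EventHandler_SFEL()
handler.register_source('SARFE10-PBPG050:INTENSITY')  # hypothetical channel name
events = handler.create_event_generator()
first_event = next(events)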
def get_stream(self, timeout=config.ZMQ_RECEIVE_TIMEOUT, data_change_callback=None):
    source_host, source_port = get_host_port_from_stream_address(self.bsread_stream_address)

    self.bsread_source = Source(host=source_host, port=source_port,
                                mode=PULL, receive_timeout=timeout)
    self.bsread_source.handler = Handler(data_change_callback)

    return self.bsread_source
def get_stream(self):
    self.verify_camera_online()
    self._collect_camera_settings()

    source_host, source_port = get_host_port_from_stream_address(self.bsread_stream_address)

    self.bsread_source = Source(host=source_host, port=source_port,
                                mode=PULL, receive_timeout=config.ZMQ_RECEIVE_TIMEOUT)

    return self.bsread_source
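# Hedged usage sketch for the get_stream() variants above: the returned Source
# still has to be connected before messages can be received. 'camera' stands in
# for a hypothetical instance of the surrounding class.
source = camera.get_stream()
source.connect()
message = source.receive()  # returns None on receive timeout
if message is not None:
    print(message.data.pulse_id)
source.disconnect()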
import numpy as np
from bsread import Source


class StreamReader:
    def __init__(self, host='localhost', port=9999):
        # Pass the constructor arguments through instead of hardcoding them.
        self.source = Source(host=host, port=port)
        self.s = self.source.connect()

    def readStream(self, Nevents, pars):
        # `pars` is the list of channel names to extract from each message
        # (it was a free variable in the original snippet).
        data = []
        for n in range(Nevents):
            m = self.s.receive()
            data.append([m.data.data[par].value for par in pars])
        return np.asarray(data)
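# Minimal sketch, assuming a bsread stream on localhost:9999 and borrowing the
# dummy channel IDs used in the examples further down:
reader = StreamReader()
block = reader.readStream(10, pars=['i0', 'i'])  # array of shape (10, 2)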
def create_source(camera_stream_address, receive_timeout=config.PIPELINE_RECEIVE_TIMEOUT, mode=SUB):
    if camera_stream_address.startswith("ipc"):
        return IpcSource(address=camera_stream_address,
                         receive_timeout=receive_timeout,
                         mode=mode)
    else:
        # Only tcp:// addresses carry a host:port pair to parse.
        source_host, source_port = get_host_port_from_stream_address(camera_stream_address)
        return Source(host=source_host, port=source_port,
                      receive_timeout=receive_timeout,
                      mode=mode)
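# Hedged usage sketch: a tcp:// address yields a regular Source, an ipc:// address
# an IpcSource; both expose the same connect/receive/disconnect interface.
# The address below is an assumed example, not from the original code.
source = create_source("tcp://127.0.0.1:8888")
source.connect()
data = source.receive()
source.disconnect()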
def _collect_camera_settings(self):
    # self.width_raw, self.height_raw = 659, 494
    stream = None
    try:
        source_host, source_port = get_host_port_from_stream_address(self.bsread_stream_address)

        stream = Source(host=source_host, port=source_port, mode=PULL, receive_timeout=3000)
        stream.connect()

        data = stream.receive()
        image = data.data.data[self.camera_config.get_source() + config.EPICS_PV_SUFFIX_IMAGE].value

        if image is None:
            self.height_raw, self.width_raw = 0, 0
        else:
            image = transform_image(image, self.camera_config)
            self.height_raw, self.width_raw = image.shape

    except Exception as e:
        raise RuntimeError("Could not fetch camera settings cam_server:{}".format(
            self.camera_config.get_source())) from e

    finally:
        # Guard against the Source construction itself failing.
        if stream:
            stream.disconnect()
class EventHandler_SFEL:
    """Example EventHandler object. The class wraps data-source-specific reader code
    into a standardized object with well-defined methods for initialisation and reading."""

    def __init__(self, host='localhost', port=9999):
        # Pass the constructor arguments through instead of hardcoding them.
        self.source = Source(host=host, port=port)
        self.read_Ids = []

    def getSourceIDs(self):
        """Dummy method which should interface to a service providing the available
        data sources (detectors)."""
        return ['i0', 'i', 't', 'i_pump', 'pump_on', 'pulseId', 'labTime']

    def registerSource(self, sourceID):
        """Dummy method to register sources to be read in the loop iterator."""
        pass

    def eventGenerator(self):
        self.stream = self.source.connect()
        return iter(EventGenerator_SFEL(self.stream))
channels_definitions = {}

for n_connection in range(n_connections):
    _logger.info("Starting connection number %d." % n_connection)

    stream_address = dispatcher.request_stream(channels)

    source_host, source_port = stream_address.rsplit(":", maxsplit=1)
    source_host = source_host.split("//")[1]
    source_port = int(source_port)

    _logger.info("Input stream host '%s' and port '%s'.", source_host, source_port)

    stream = Source(host=source_host, port=source_port, mode=SUB, receive_timeout=1000)
    stream.connect()

    n_received_messages = 0
    while n_received_messages < n_messages_per_connection:
        message = stream.receive(handler=handler.receive)

        # In case you set a receive timeout, the returned message can be None.
        if message is None:
            _logger.debug("Empty message.")
            continue

        n_received_messages += 1
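# Hedged sketch of consuming a message received above: channel values live under
# message.data.data, keyed by channel name (the name below is a placeholder).
value = message.data.data["CHANNEL-NAME"].value  # hypothetical channel
pulse_id = message.data.pulse_id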
def __init__(self, host="localhost", port=9999): self.source = Source("localhost", 9999) self.s = self.source.connect()
def store_pipeline(stop_event, statistics, parameter_queue,
                   cam_client, pipeline_config, output_stream_port, background_manager):
    # TODO: Implement statistics: n_clients, input_throughput

    def no_client_timeout():
        _logger.warning("No client connected to the pipeline stream for %d seconds. Closing instance." %
                        config.MFLOW_NO_CLIENTS_TIMEOUT)
        stop_event.set()

    source = None
    sender = None

    try:
        camera_stream_address = cam_client.get_camera_stream(pipeline_config.get_camera_name())
        camera_name = pipeline_config.get_camera_name()

        _logger.debug("Connecting to camera %s on stream address %s.", camera_name, camera_stream_address)

        source_host, source_port = get_host_port_from_stream_address(camera_stream_address)

        source = Source(host=source_host, port=source_port,
                        receive_timeout=config.PIPELINE_RECEIVE_TIMEOUT, mode=SUB)
        source.connect()

        _logger.debug("Opening output stream on port %d.", output_stream_port)

        sender = Sender(port=output_stream_port, mode=PUSH,
                        data_header_compression=config.CAMERA_BSREAD_DATA_HEADER_COMPRESSION,
                        block=False)
        sender.open(no_client_action=no_client_timeout,
                    no_client_timeout=config.MFLOW_NO_CLIENTS_TIMEOUT)
        # TODO: Register proper channels.

        # Indicate that the startup was successful.
        stop_event.clear()

        _logger.debug("Transceiver started.")

        while not stop_event.is_set():
            try:
                data = source.receive()

                # In case of a receive error or timeout, the returned data is None.
                if data is None:
                    continue

                forward_data = {camera_name: data.data.data["image"].value}

                pulse_id = data.data.pulse_id
                timestamp = (data.data.global_timestamp, data.data.global_timestamp_offset)

                sender.send(data=forward_data, pulse_id=pulse_id, timestamp=timestamp)

            except Exception:
                _logger.exception("Could not process message.")
                stop_event.set()

        _logger.info("Stopping transceiver.")

    except Exception:
        _logger.exception("Exception while trying to start the receive and process thread.")
        raise

    finally:
        if source:
            source.disconnect()

        if sender:
            sender.close()
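# Hedged client sketch for store_pipeline: since the Sender uses PUSH, a consumer
# connects with a PULL Source on the chosen output_stream_port (8889 is assumed here).
from bsread import Source, PULL

client = Source(host="localhost", port=8889, mode=PULL, receive_timeout=1000)
client.connect()
message = client.receive()  # None on timeout
client.disconnect()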
def processing_pipeline(stop_event, statistics, parameter_queue,
                        cam_client, pipeline_config, output_stream_port, background_manager):
    # TODO: Implement statistics: n_clients, input_throughput

    def no_client_timeout():
        _logger.warning("No client connected to the pipeline stream for %d seconds. Closing instance." %
                        config.MFLOW_NO_CLIENTS_TIMEOUT)
        stop_event.set()

    def process_pipeline_parameters():
        parameters = pipeline_config.get_configuration()
        _logger.debug("Processing pipeline parameters %s.", parameters)

        background_array = None
        if parameters.get("image_background_enable"):
            background_id = pipeline_config.get_background_id()
            _logger.debug("Image background enabled. Using background_id %s.", background_id)

            background_array = background_manager.get_background(background_id)

        size_x, size_y = cam_client.get_camera_geometry(pipeline_config.get_camera_name())

        image_region_of_interest = parameters.get("image_region_of_interest")
        if image_region_of_interest:
            _, size_x, _, size_y = image_region_of_interest

        _logger.debug("Image width %d and height %d.", size_x, size_y)

        return parameters, background_array

    source = None
    sender = None

    try:
        pipeline_parameters, image_background_array = process_pipeline_parameters()

        camera_stream_address = cam_client.get_camera_stream(pipeline_config.get_camera_name())
        _logger.debug("Connecting to camera stream address %s.", camera_stream_address)

        source_host, source_port = get_host_port_from_stream_address(camera_stream_address)

        source = Source(host=source_host, port=source_port,
                        receive_timeout=config.PIPELINE_RECEIVE_TIMEOUT, mode=SUB)
        source.connect()

        _logger.debug("Opening output stream on port %d.", output_stream_port)

        sender = Sender(port=output_stream_port, mode=PUB,
                        data_header_compression=config.CAMERA_BSREAD_DATA_HEADER_COMPRESSION)
        sender.open(no_client_action=no_client_timeout,
                    no_client_timeout=config.MFLOW_NO_CLIENTS_TIMEOUT)
        # TODO: Register proper channels.

        # Indicate that the startup was successful.
        stop_event.clear()

        _logger.debug("Transceiver started.")

        while not stop_event.is_set():
            try:
                # Apply any parameter updates queued since the last iteration.
                while not parameter_queue.empty():
                    new_parameters = parameter_queue.get()
                    pipeline_config.set_configuration(new_parameters)
                    pipeline_parameters, image_background_array = process_pipeline_parameters()

                data = source.receive()

                # In case of a receive error or timeout, the returned data is None.
                if data is None:
                    continue

                image = data.data.data["image"].value
                x_axis = data.data.data["x_axis"].value
                y_axis = data.data.data["y_axis"].value
                processing_timestamp = data.data.data["timestamp"].value

                processed_data = process_image(image, processing_timestamp,
                                               x_axis, y_axis,
                                               pipeline_parameters, image_background_array)

                processed_data["width"] = processed_data["image"].shape[1]
                processed_data["height"] = processed_data["image"].shape[0]

                pulse_id = data.data.pulse_id
                timestamp = (data.data.global_timestamp, data.data.global_timestamp_offset)

                sender.send(data=processed_data, timestamp=timestamp, pulse_id=pulse_id)

            except Exception:
                _logger.exception("Could not process message.")
                stop_event.set()

        _logger.info("Stopping transceiver.")

    except Exception:
        _logger.exception("Exception while trying to start the receive and process thread.")
        raise

    finally:
        if source:
            source.disconnect()

        if sender:
            sender.close()
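# Hedged client sketch for processing_pipeline: the Sender uses PUB, so consumers
# subscribe with a SUB Source on the output port (8890 is assumed here).
from bsread import Source, SUB

client = Source(host="localhost", port=8890, mode=SUB, receive_timeout=1000)
client.connect()
message = client.receive()
if message is not None:
    print(message.data.data["width"].value, message.data.data["height"].value)
client.disconnect()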
class ReadGroupInterface(object):
    """
    Provide a beam synchronous acquisition for PV data.
    """

    def __init__(self, properties, conditions=None, host=None, port=None, filter_function=None):
        """
        Create the bsread group read interface.
        :param properties: List of PVs to read for processing.
        :param conditions: List of PVs to read as conditions.
        :param filter_function: Filter the BS stream with a custom function.
        """
        self.host = host
        self.port = port
        self.properties = convert_to_list(properties)
        self.conditions = convert_to_list(conditions)
        self.filter = filter_function

        self._message_cache = None
        self._message_cache_timestamp = None

        self._connect_bsread(config.bs_default_host, config.bs_default_port)

    def _connect_bsread(self, host, port):
        # Configure the connection type.
        if config.bs_connection_mode.lower() == "sub":
            mode = mflow.SUB
        elif config.bs_connection_mode.lower() == "pull":
            mode = mflow.PULL
        else:
            # Fail explicitly instead of leaving `mode` unbound.
            raise ValueError("Unsupported bs_connection_mode '%s'." % config.bs_connection_mode)

        if host and port:
            self.stream = Source(host=host,
                                 port=port,
                                 queue_size=config.bs_queue_size,
                                 receive_timeout=config.bs_receive_timeout,
                                 mode=mode)
        else:
            channels = [x.identifier for x in self.properties] + \
                       [x.identifier for x in self.conditions]
            self.stream = Source(channels=channels,
                                 queue_size=config.bs_queue_size,
                                 receive_timeout=config.bs_receive_timeout,
                                 mode=mode)

        self.stream.connect()

    @staticmethod
    def is_message_after_timestamp(message, timestamp):
        """
        Check if the received message was captured after the provided timestamp.
        :param message: Message to inspect.
        :param timestamp: Timestamp to compare the message to.
        :return: True if the message is after the timestamp, False otherwise.
        """
        # Receive might time out; in this case we have nothing to compare.
        if not message:
            return False

        # This is how bsread encodes the timestamp.
        current_sec = int(timestamp)
        current_ns = int(math.modf(timestamp)[0] * 1e9)

        message_sec = message.data.global_timestamp
        message_ns = message.data.global_timestamp_offset

        # If the seconds are the same, the nanoseconds must be equal or larger.
        if message_sec == current_sec:
            return message_ns >= current_ns
        # Otherwise the message seconds need to be larger than the current seconds.
        else:
            return message_sec > current_sec

    @staticmethod
    def _get_missing_property_default(property_definition):
        """
        In case a bsread value is missing, either return the default value or raise an exception.
        :param property_definition: Definition of the missing property.
        :return: The default value.
        """
        # If an exception class is set as the default, raise it with a descriptive message.
        if property_definition.default_value == Exception:
            raise property_definition.default_value(
                "Property '%s' missing in bs stream." % property_definition.identifier)
        # Otherwise just return the default value.
        else:
            return property_definition.default_value

    def _read_pvs_from_cache(self, properties):
        """
        Read the requested properties from the cache.
        :param properties: List of properties to read.
        :return: List with PV values.
        """
        if not self._message_cache:
            raise ValueError("Message cache is empty, cannot read PVs %s." % properties)

        pv_values = []
        for property_name, property_definition in ((x.identifier, x) for x in properties):
            if property_name in self._message_cache.data.data:
                value = self._message_cache.data.data[property_name].value
            else:
                value = self._get_missing_property_default(property_definition)

            # TODO: Check if the Python conversion works in every case.
            # bsread always returns numpy, and we always convert it to Python.
            pv_values.append(value)

        return pv_values

    def read(self):
        """
        Read the PV values from bsread. It uses the first PV data sampled after the
        invocation of this method.
        :return: List of values for the read PVs. Note: condition PVs are excluded.
        """
        read_timestamp = time()

        while time() - read_timestamp < config.bs_read_timeout:
            message = self.stream.receive(filter=self.filter)

            if self.is_message_after_timestamp(message, read_timestamp):
                self._message_cache = message
                self._message_cache_timestamp = read_timestamp

                return self._read_pvs_from_cache(self.properties)
        else:
            # The while..else clause executes only if the loop exhausts without returning.
            raise Exception("Read timeout exceeded for BS read stream. "
                            "Could not find the desired package in time.")

    def read_cached_conditions(self):
        """
        Return the conditions associated with the last read command.
        :return: List of condition values.
        """
        return self._read_pvs_from_cache(self.conditions)

    def close(self):
        """
        Disconnect from the stream and clear the message cache.
        """
        if self.stream:
            self.stream.disconnect()

        self._message_cache = None
        self._message_cache_timestamp = None
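# Hedged usage sketch for ReadGroupInterface, assuming property/condition objects
# with 'identifier' and 'default_value' attributes as used above; the two
# definition objects are hypothetical placeholders.
group = ReadGroupInterface(properties=[intensity_property],
                           conditions=[shutter_condition])
values = group.read()                        # blocks until a fresh message arrives
conditions = group.read_cached_conditions()  # conditions from the same cached message
group.close()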