def test_start_missing_header(self):

        with sender(port=self.STREAM_PORT) as output_stream:
            with source(host="localhost", port=self.STREAM_PORT) as input_stream:

                for index in range(10):
                    data = {"fast_source": index,
                            "slow_source": None}

                    output_stream.send(data=data)
                    self.writer.write_message(input_stream.receive(handler=self.handler.receive))

                for index in range(10, 20):
                    data = {"fast_source": index,
                            "slow_source": index}

                    output_stream.send(data=data)
                    self.writer.write_message(input_stream.receive(handler=self.handler.receive))

        self.writer.close()

        file = h5py.File(self.OUTPUT_FILE, mode="r")

        fast_source = file["/data/fast_source/data"]
        slow_source = file["/data/slow_source/data"]

        self.assertIsNotNone(fast_source)
        self.assertIsNotNone(slow_source)

        self.assertListEqual(list(fast_source), list(range(20)))

        self.assertListEqual(list(slow_source[:10]), [0] * 10)
        self.assertListEqual(list(slow_source[10:]), list(range(10, 20)))

        file.close()
    def test_classic_interaction(self):
        response = requests.get(self.rest_url + "status").json()
        self.assertEqual(response["state"], "ok")
        self.assertEqual(response["status"], "waiting")

        response = requests.post(self.rest_url + "parameters", json={}).json()
        self.assertEqual(response["state"], 'error')
        self.assertTrue("Missing mandatory" in response["status"])

        parameters = {
            "general/created": "today",
            "general/user": "******",
            "general/process": "dia",
            "general/instrument": "jungfrau"
        }

        response = requests.post(self.rest_url + "parameters",
                                 json=parameters).json()
        self.assertEqual(response["state"], "ok")
        self.assertEqual(response["status"], "waiting")
        self.assertEqual(response["parameters"], parameters)

        requests.put(self.rest_url + "start_pulse_id/3")

        response = requests.get(self.rest_url + "statistics").json()
        self.assertEqual(response["state"], "ok")
        self.assertEqual(response["statistics"]["start_pulse_id"], 3)
        self.assertEqual(response["statistics"]["stop_pulse_id"], None)

        timestamp = 0
        timestamp_offset = 0

        data = {"device1": 0, "device2": 0, "device3": None}

        with sender(port=self.stream_port, mode=PUSH,
                    queue_size=1) as output_stream:
            for index in range(10):
                pulse_id = index
                data["device1"] = index + 100
                data["device2"] = index + 200

                output_stream.send(timestamp=(timestamp, timestamp_offset),
                                   pulse_id=pulse_id,
                                   data=data)

                # Somewhere before the end of the acquisition.
                if pulse_id == 6:
                    requests.put(self.rest_url + "stop_pulse_id/8")

                    response = requests.get(self.rest_url +
                                            "statistics").json()
                    self.assertEqual(response["state"], "ok")
                    self.assertEqual(response["statistics"]["start_pulse_id"],
                                     3)
                    self.assertEqual(response["statistics"]["stop_pulse_id"],
                                     8)

        sleep(0.5)
        self.assertFalse(self.writer_process.is_alive())
    def test_change_header(self):

        with sender(port=self.STREAM_PORT) as output_stream:
            with source(host="localhost", port=self.STREAM_PORT) as input_stream:

                for index in range(10):
                    data = {"scalar_source": index,
                            "changing_source": index}

                    output_stream.send(data=data)
                    self.writer.write_message(input_stream.receive(handler=self.handler.receive))

                for index in range(10, 20):
                    data = {"scalar_source": index,
                            "changing_source": [index, index, index]}

                    output_stream.send(data=data)
                    self.writer.write_message(input_stream.receive(handler=self.handler.receive))

        self.writer.close()

        file = h5py.File(self.OUTPUT_FILE, mode="r")

        scalar_source = file["/data/scalar_source/data"]
        changing_source_1 = file["/data/changing_source/data(1)"]
        changing_source_2 = file["/data/changing_source/data"]

        self.assertIsNotNone(scalar_source)
        self.assertIsNotNone(changing_source_1)
        self.assertIsNotNone(changing_source_2)

        self.assertEqual(len(scalar_source), 20)
        self.assertEqual(len(changing_source_1), 10)
        self.assertEqual(len(changing_source_2), 20)

        self.assertListEqual(list(scalar_source), list(range(20)))
        self.assertListEqual(list(changing_source_1[:10]), list(range(10)))

        for index in range(10):
            self.assertListEqual(list(changing_source_2[index]), [0] * 3)

        for index in range(10, 20):
            self.assertListEqual(list(changing_source_2[index]), [index] * 3)

        file.close()
    def test_classic_interaction(self):

        data = {}

        with sender(port=self.stream_port, mode=PUSH,
                    queue_size=1) as output_stream:
            for index in range(10):
                pulse_id = index
                data["device1"] = index + 100
                data["device2"] = index + 200
                data["device3"] = None

                output_stream.send(pulse_id=pulse_id, data=data)

        self.runningEvent.clear()
        sleep(1.5)

        self.assertFalse(self.buffer_process.is_alive())
Example #5
def generate_stream(output_port=8888, n_images=1000, delay=0.1):
    """
    Open a stream and generate the simulated messages. This call is blocking.
    :param output_port: Port to bind the output stream to.
    :param n_images: Number of images to send (-1 == infinite)
    :param delay: How much time in seconds to wait between each message. 0.1 == 10Hz.
    """

    with sender(port=output_port) as output_stream:

        if n_images == -1:
            while True:
                generate_and_send_message(output_stream)
                sleep(delay)

        else:
            for _ in range(n_images):
                generate_and_send_message(output_stream)
                sleep(delay)
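# Minimal usage sketch for generate_stream defined above; the port, image
# count and delay values here are illustrative assumptions, not taken from
# the original example.
if __name__ == "__main__":
    # Stream 100 simulated messages at roughly 10 Hz on port 8888, then exit.
    generate_stream(output_port=8888, n_images=100, delay=0.1)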
Example #6
    def test_change_roi_while_running(self):
        client = PsssProcessingClient("http://localhost:10000/")

        roi = [0, 1024, 0, 1024]
        client.set_roi(roi)

        processed_data = []

        client.start()

        with source(host="localhost", port=12000, mode=PULL) as input_stream:
            # First pulse_id comes before the source connects.
            for index in range(self.n_images - 1):
                message = input_stream.receive()
                processed_data.append(message)

        updated_roi = [100, 200, 100, 200]
        client.set_roi(updated_roi)

        data_to_send = {
            self.pv_name_prefix + config.EPICS_PV_SUFFIX_IMAGE: self.image
        }

        with sender(port=10001) as output_stream:
            for x in range(self.n_images):
                output_stream.send(data=data_to_send)

        with source(host="localhost", port=12000, mode=PULL) as input_stream:
            for index in range(self.n_images):
                message = input_stream.receive()
                processed_data.append(message)

        client.stop()

        processing_parameters_name = self.pv_name_prefix + config.EPICS_PV_SUFFIX_IMAGE + ".processing_parameters"

        start_processing_parameters = json.loads(
            processed_data[0].data.data[processing_parameters_name].value)
        end_processing_parameters = json.loads(
            processed_data[8].data.data[processing_parameters_name].value)

        self.assertListEqual(roi, start_processing_parameters["roi"])
        self.assertListEqual(updated_roi, end_processing_parameters["roi"])
Example #7
    def test_receive_timeout(self):
        from bsread import source
        from bsread.sender import sender

        with source(host="localhost", port=9999,
                    receive_timeout=10) as in_stream:

            with sender(queue_size=10) as stream:

                # Send Data
                # stream.send(one=1, two=2)
                # stream.send(one=3, two=4)

                # Receive and check data
                # If the timeout is not working, this will hang forever and therefore the test will fail.
                message = in_stream.receive()
                if message:
                    print(message.data.data['one'].value)

                self.assertIsNone(message)
def send_bsread_message(output_port,
                        message_buffer,
                        running_event,
                        mode=PUSH,
                        buffer_timeout=0.01):

    _logger.info("Output stream binding to port '%s'.", output_port)

    try:

        with sender(port=output_port, mode=mode,
                    queue_size=1) as output_stream:

            while running_event.is_set():

                if len(message_buffer) == 0:
                    sleep(buffer_timeout)
                    continue

                message, message_timestamp = message_buffer.popleft()

                data = {}
                for value_name, bsread_value in message.data.data.items():
                    data[value_name] = bsread_value.value

                _logger.debug("Sending message with pulse_id '%s'.",
                              message.data.pulse_id)

                output_stream.send(timestamp=message_timestamp,
                                   pulse_id=message.data.pulse_id,
                                   data=data,
                                   check_data=True)

                _logger.debug("Message with pulse_id '%s' forwarded.",
                              message.data.pulse_id)

    except Exception as e:
        running_event.clear()
        _logger.error("Exception happened in sending thread. Stopping buffer.",
                      e)
Example #9
    def test_receive_filter(self):
        from bsread import source
        from bsread.sender import sender

        def filter_method(m):
            print(m.data.data['two'].value)
            return m.data.data['two'].value <= 4

        with source(host="localhost", port=9999) as in_stream:

            with sender(queue_size=10) as stream:

                # Send Data
                stream.send(one=1, two=12.0)
                stream.send(one=2, two=4)
                stream.send(one=3, two=10)
                stream.send(one=4, two=9.5)

                # Receive and check data
                # If the timeout is not working, this will hang forever and therefore the test will fail.
                message = in_stream.receive(
                    filter=lambda m: m.data.data['two'].value <= 4)
                # message = in_stream.receive(filter=filter_method)
                print(message.data.data['one'].value)
simulated_camera = CameraSimulation(camera_config=CameraConfig("simulation"),
                                    size_x=image_size_x, size_y=image_size_y)
x_axis, y_axis = simulated_camera.get_x_y_axis()
x_size, y_size = simulated_camera.get_geometry()

# Documentation: https://github.com/datastreaming/cam_server#pipeline_configuration
pipeline_parameters = {
    "camera_name": "simulation"
}

pipeline_config = PipelineConfig("test_pipeline", pipeline_parameters)
parameters = pipeline_config.get_configuration()

image_number = 0

with sender(port=output_stream_port, mode=output_stream_mode) as output_stream:
    # Get simulated image.
    image = simulated_camera.get_image()

    # Generate timestamp.
    timestamp = time.time()

    # Pass data to processing pipeline.
    processed_data = process_image(image, 0, timestamp, x_axis, y_axis, pipeline_parameters)

    # Set height and width.
    processed_data["width"] = processed_data["image"].shape[1]
    processed_data["height"] = processed_data["image"].shape[0]

    print("Sending image number: ", image_number)
    image_number += 1
    def send_data():
        sleep(1)
        with sender(port=10000) as output_stream:
            for x in range(n_images):
                output_stream.send(data=data_to_send)
    def send_data():
        with sender(port=10000, queue_size=100) as output_stream:
            for x in range(n_images):
                output_stream.send(data=data_to_send)
Example #13
    def send_data():
        with sender(port=11000) as output_stream:
            for x in range(self.n_images):
                output_stream.send(data=data_to_send)
    def stream_processor(running_flag, roi, parameters, statistics):
        try:
            running_flag.set()

            _logger.info("Connecting to input_stream_host %s and input_stream_port %s.",
                         input_stream_host, input_stream_port)

            _logger.info("Sending out data on stream port %s.", output_stream_port)

            if output_pv_name:
                _logger.info("Sending out data on EPICS PV %s.", output_pv_name)
                epics.ca.clear_cache()
                output_pv = PV(output_pv_name)
            else:
                _logger.warning("Output EPICS PV not specified. Only bsread will be sent out.")

            with source(host=input_stream_host, port=input_stream_port, mode=PULL,
                        queue_size=config.INPUT_STREAM_QUEUE_SIZE,
                        receive_timeout=config.INPUT_STREAM_RECEIVE_TIMEOUT) as input_stream:

                with sender(port=output_stream_port, send_timeout=config.OUTPUT_STREAM_SEND_TIMEOUT,
                            block=False) as output_stream:

                    statistics["processing_start_time"] = str(datetime.datetime.now())
                    statistics["last_sent_pulse_id"] = None
                    statistics["last_sent_time"] = None
                    statistics["last_calculated_spectrum"] = None
                    statistics["n_processed_images"] = 0

                    image_property_name = epics_pv_name_prefix + config.EPICS_PV_SUFFIX_IMAGE

                    _logger.info("Using image_to_process property name '%s'.", image_property_name)

                    while running_flag.is_set():

                        message = input_stream.receive()

                        if message is None:
                            continue

                        pulse_id = message.data.pulse_id
                        timestamp = (message.data.global_timestamp, message.data.global_timestamp_offset)

                        _logger.debug("Received message with pulse_id %s", pulse_id)

                        image_to_process = message.data.data[image_property_name].value

                        processed_data = process_image(image_to_process,
                                                       image_property_name,
                                                       roi,
                                                       parameters["min_threshold"],
                                                       parameters["max_threshold"],
                                                       parameters["rotation"])

                        try:
                            output_stream.send(pulse_id=pulse_id,
                                               timestamp=timestamp,
                                               data=processed_data)

                            _logger.debug("Sent message with pulse_id %s", pulse_id)

                            statistics["last_sent_pulse_id"] = pulse_id
                            statistics["last_sent_time"] = str(datetime.datetime.now())
                        except Again:
                            pass

                        statistics["last_calculated_spectrum"] = processed_data[image_property_name + ".spectrum"]
                        statistics["n_processed_images"] = statistics.get("n_processed_images", 0) + 1

                        if output_pv_name:
                            output_pv.put(processed_data[image_property_name + ".spectrum"])
                            _logger.debug("caput on %s for pulse_id %s", output_pv, pulse_id)

        except Exception as e:
            _logger.error("Error while processing the stream. Exiting. Error: ", e)
            running_flag.clear()

            raise

        except KeyboardInterrupt:
            _logger.warning("Terminating processing due to user request.")
            running_flag.clear()

            raise
Example #15
    def stream_processor(running_flag, roi_signal, roi_background, statistics):
        try:
            running_flag.set()

            _logger.info(
                "Connecting to input_stream_host %s and input_stream_port %s.",
                input_stream_host, input_stream_port)

            _logger.info("Sending out data on stream port %s.",
                         data_output_stream_port)

            with source(host=input_stream_host,
                        port=input_stream_port,
                        mode=PULL,
                        queue_size=config.INPUT_STREAM_QUEUE_SIZE,
                        receive_timeout=config.INPUT_STREAM_RECEIVE_TIMEOUT
                        ) as input_stream:

                with sender(port=data_output_stream_port,
                            send_timeout=config.DATA_OUTPUT_STREAM_SEND_TIMEOUT
                            ) as data_output_stream:

                    with sender(
                            port=image_output_stream_port,
                            block=False,
                            queue_size=config.IMAGE_OUTPUT_STREAM_QUEUE_SIZE
                    ) as image_output_stream:

                        statistics["processing_start_time"] = str(
                            datetime.datetime.now())
                        statistics["last_sent_pulse_id"] = None
                        statistics["last_sent_time"] = None
                        statistics["n_processed_images"] = 0

                        image_property_name = epics_pv_name_prefix + config.EPICS_PV_SUFFIX_IMAGE

                        _logger.info("Using image property name '%s'.",
                                     image_property_name)

                        while running_flag.is_set():

                            message = input_stream.receive()

                            if message is None:
                                continue

                            pulse_id = message.data.pulse_id
                            timestamp = (message.data.global_timestamp,
                                         message.data.global_timestamp_offset)

                            _logger.debug("Received message with pulse_id %s",
                                          pulse_id)

                            image = message.data.data[
                                image_property_name].value
                            processed_data = process_image(
                                pulse_id, image, image_property_name,
                                roi_signal, roi_background)

                            # Send out processed data.
                            try:
                                data_output_stream.send(pulse_id=pulse_id,
                                                        timestamp=timestamp,
                                                        data=processed_data)

                                _logger.debug(
                                    "Sent data message with pulse_id %s",
                                    pulse_id)

                                statistics["last_sent_pulse_id"] = pulse_id
                                statistics["last_sent_time"] = str(
                                    datetime.datetime.now())
                                statistics[
                                    "n_processed_images"] = statistics.get(
                                        "n_processed_images", 0) + 1

                            except Again:
                                pass

                            # Send out image.
                            try:
                                image_output_stream.send(
                                    pulse_id=pulse_id,
                                    timestamp=timestamp,
                                    data={image_property_name: image})

                                _logger.debug(
                                    "Sent image message with pulse_id %s",
                                    pulse_id)

                            except Again:
                                pass

        except Exception as e:
            _logger.error("Error while processing the stream. Exiting. Error: %s", e)
            running_flag.clear()

            raise

        except KeyboardInterrupt:
            _logger.warning("Terminating processing due to user request.")
            running_flag.clear()

            raise
    def test_buffer_writer_integration(self):

        with sender(port=self.buffer_stream_port, queue_size=100,
                    mode=PUSH) as source_stream:

            self.buffer_process.start()
            sleep(0.5)

            for index in range(0, 10):
                data = {
                    "normal_source": index,
                    "slow_source": None,
                    "changing_source": index
                }

                source_stream.send(data=data)

            for index in range(10, 20):
                data = {
                    "normal_source": index,
                    "slow_source": index,
                    "changing_source": [index] * 3
                }

                source_stream.send(data=data)

            # The range ends at 31 because the writer stops only AFTER receiving the first message it should not write.
            for index in range(20, 31):
                data = {
                    "normal_source": index,
                    "slow_source": None,
                    "changing_source": [index] * 3
                }

                source_stream.send(data=data)

        self.writer_process.start()
        sleep(0.5)

        response = requests.get(self.writer_rest_url + "status").json()
        self.assertEqual(response["state"], "ok")
        self.assertEqual(response["status"], "waiting")

        parameters = {
            "general/created": "today",
            "general/user": "******",
            "general/process": "dia",
            "general/instrument": "jungfrau"
        }

        response = requests.post(self.writer_rest_url + "parameters",
                                 json=parameters).json()
        self.assertEqual(response["state"], "ok")
        self.assertEqual(response["status"], "waiting")
        self.assertEqual(response["parameters"], parameters)

        requests.put(self.writer_rest_url + "start_pulse_id/0")

        response = requests.get(self.writer_rest_url + "statistics").json()
        self.assertEqual(response["state"], "ok")
        self.assertEqual(response["statistics"]["start_pulse_id"], 0)
        self.assertEqual(response["statistics"]["stop_pulse_id"], None)

        requests.put(self.writer_rest_url + "stop_pulse_id/29")

        # Wait for the writer to terminate.
        sleep(1.5)

        self.assertFalse(self.writer_process.is_alive(),
                         "Writer process should be dead by now.")
        self.assertTrue(self.buffer_process.is_alive(),
                        "Buffer process should be still alive.")

        file = h5py.File(self.writer_output_file, mode="r")

        normal_source = file["/data/normal_source/data"]
        slow_source = file["/data/slow_source/data"]
        changing_source_1 = file["/data/changing_source/data(1)"]
        changing_source_2 = file["/data/changing_source/data"]

        self.assertEqual(len(normal_source), 30)
        self.assertEqual(len(slow_source), 30)
        self.assertEqual(len(changing_source_1), 10)
        self.assertEqual(len(changing_source_2), 30)
Example #17
    def send_data():
        with sender(port=10001, queue_size=100) as output_stream:
            sleep(1)
            for x in range(self.n_images):
                output_stream.send(data=data_to_send)
Example #18
    def stream_processor(running_flag, parameters, statistics):
        try:
            running_flag.set()

            _logger.info("Connecting to input_stream_host %s and input_stream_port %s.",
                         input_stream_host, input_stream_port)

            _logger.info("Sending out data on stream port %s.", data_output_stream_port)
            _logger.info("Sending out images on stream port %s.", image_output_stream_port)

            if output_pv_name:
                _logger.info("Sending out spectrum data on EPICS PV %s.", output_pv_name)
                epics.ca.clear_cache()
                output_pv = epics.PV(output_pv_name)
            else:
                _logger.warning("Output EPICS PV not specified. Only bsread will be sent out.")

            if center_pv_name:
                _logger.info("Sending out spectrum center on EPICS PV %s.", center_pv_name)
                center_pv = epics.PV(center_pv_name)
            else:
                _logger.warning("Output EPICS PV not specified. Only bsread will be sent out.")

            if fwhm_pv_name:
                _logger.info("Sending out spectrum fwhm on EPICS PV %s.", fwhm_pv_name)
                fwhm_pv = epics.PV(fwhm_pv_name)
            else:
                _logger.warning("Output EPICS PV not specified. Only bsread will be sent out.")
            # EPICS PV for vertical ROI
            if ymin_pv_name:
                ymin_pv = epics.PV(ymin_pv_name)
                ymin_pv.wait_for_connection()
            if ymax_pv_name:
                ymax_pv = epics.PV(ymax_pv_name)
                ymax_pv.wait_for_connection()
            if axis_pv_name:
                axis_pv = epics.PV(axis_pv_name)
                axis_pv.wait_for_connection()

            roi = [0, 0]

            with source(host=input_stream_host, port=input_stream_port, mode=PULL,
                        queue_size=config.INPUT_STREAM_QUEUE_SIZE,
                        receive_timeout=config.INPUT_STREAM_RECEIVE_TIMEOUT) as input_stream:

                with sender(port=data_output_stream_port, send_timeout=config.OUTPUT_STREAM_SEND_TIMEOUT,
                            block=False) as data_output_stream:

                    with sender(port=image_output_stream_port, send_timeout=config.OUTPUT_STREAM_SEND_TIMEOUT,
                                block=False, queue_size=config.IMAGE_OUTPUT_STREAM_QUEUE_SIZE) as image_output_stream:


                        # use zmq zero-copy for image data
                        image_output_stream.stream.zmq_copy = False
                        image_output_stream.stream.zmq_track = True

                        statistics["processing_start_time"] = str(datetime.datetime.now())
                        statistics["last_sent_pulse_id"] = None
                        statistics["last_sent_time"] = None
                        statistics["last_calculated_spectrum"] = None
                        statistics["n_processed_images"] = 0

                        image_property_name = epics_pv_name_prefix + config.EPICS_PV_SUFFIX_IMAGE

                        _logger.info("Using image_to_process property name '%s'.", image_property_name)

                        while running_flag.is_set():

                            try:
                                message = input_stream.receive()
                            except Exception:
                                _logger.exception("Input stream receiving error.")
                                continue

                            if message is None:
                                continue

                            start_time = time.time()
                            pulse_id = message.data.pulse_id
                            timestamp = (message.data.global_timestamp, message.data.global_timestamp_offset)
                            image_to_process = message.data.data[image_property_name].value

                            if image_to_process is None:
                                continue

                            _logger.debug("Received message with pulse_id %s", pulse_id)

                            image_data = {image_property_name: image_to_process}

                            if ymin_pv_name and ymin_pv.connected:
                                roi[0] = ymin_pv.value
                            if ymax_pv_name and ymax_pv.connected:
                                roi[1] = ymax_pv.value
                            if axis_pv_name and axis_pv.connected:
                                axis = axis_pv.value
                            else:
                                axis = None

                            if axis is None or len(axis) != image_to_process.shape[1]:
                                _logger.warning("Invalid energy axis.")
                                continue

                            processed_data = process_image(image_to_process,
                                                           axis,
                                                           epics_pv_name_prefix,
                                                           roi,
                                                           parameters)

                            try:
                                data_output_stream.send(pulse_id=pulse_id,
                                                        timestamp=timestamp,
                                                        data=processed_data)

                                _logger.debug("Sent data message with pulse_id %s", pulse_id)

                                statistics["last_sent_pulse_id"] = pulse_id
                                statistics["last_sent_time"] = str(datetime.datetime.now())
                            except zmq.Again:
                                pass

                            try:
                                image_output_stream.send(pulse_id=pulse_id,
                                                         timestamp=timestamp,
                                                         data=image_data)

                                _logger.debug("Sent image message with pulse_id %s", pulse_id)
                            except zmq.Again:
                                pass

                            statistics["last_calculated_spectrum"] = processed_data[epics_pv_name_prefix +
                                                                                    ":SPECTRUM_Y"]
                            statistics["n_processed_images"] = statistics.get("n_processed_images", 0) + 1

                            if output_pv_name and output_pv.connected:
                                output_pv.put(processed_data[epics_pv_name_prefix + ":SPECTRUM_Y"])
                                _logger.debug("caput on %s for pulse_id %s", output_pv, pulse_id)

                            if center_pv_name and center_pv.connected:
                                center_pv.put(processed_data[epics_pv_name_prefix + ":SPECTRUM_CENTER"])

                            if fwhm_pv_name and fwhm_pv.connected:
                                fwhm_pv.put(processed_data[epics_pv_name_prefix + ":SPECTRUM_FWHM"])

                            duration = (time.time() - start_time) * 1000
                            statistics["last_processing_duration_ms"] = duration

        except Exception as e:
            _logger.error("Error while processing the stream. Exiting. Error: ", e)
            running_flag.clear()

            raise

        except KeyboardInterrupt:
            _logger.warning("Terminating processing due to user request.")
            running_flag.clear()

            raise