Example #1
    async def run(self) -> None:
        """
        The main task that reads data from the sensors and pushes it onto the event bus.
        """
        # Generate the UUIDs of new sensors
        sensor_stream = stream.chain(
            stream.iterate(iterate_safely(f"{self.__topic}/get", f"{self.__topic}/status_update")),
            stream.iterate(event_bus.subscribe(f"{self.__topic}/add_host")),
        ) | pipe.flatmap(
            lambda item: stream.chain(
                stream.call(event_bus.call, f"{self.__topic}/get_config", item)
                | catch.pipe(TopicNotRegisteredError),
                stream.iterate(event_bus.subscribe(f"nodes/by_uuid/{item}/update")),
            )
            | pipe.until(lambda config: config is None)
            | pipe.map(lambda config: config if self._is_config_valid(self.__node_id, config) else None)
            | pipe.map(self._create_transport)
            | pipe.switchmap(
                lambda transport: stream.empty() if transport is None else stream.iterate(transport.stream_data())
            )
            | pipe.action(lambda data: event_bus.publish("wamp/publish", data))
        )

        await sensor_stream
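
A note on the switchmap stage above: it is what makes a new configuration replace the running transport stream instead of adding a second one. The following minimal sketch reproduces that behaviour with plain aiostream; fake_transport_data and the timings are made-up stand-ins, not project code.

import asyncio
from aiostream import stream, pipe


async def fake_transport_data(config: int):
    """Stand-in for transport.stream_data(): emit readings for one config."""
    for reading in range(10):
        await asyncio.sleep(0.01)
        yield f"config {config}: reading {reading}"


async def main():
    # Two "configs" arrive 60 ms apart; switchmap cancels the first inner stream
    # as soon as the second config shows up, just like a config update above
    # replaces the old transport stream.
    configs = stream.range(2, interval=0.06)
    data = configs | pipe.switchmap(lambda config: stream.iterate(fake_transport_data(config)))
    async with data.stream() as streamer:
        async for item in streamer:
            print(item)


asyncio.run(main())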
Example #2
    def _stream_config_updates(
            sensor: TinkerforgeSensor) -> AsyncGenerator[dict[str, Any], None]:
        """
        Try to fetch a config from the database, then listen to 'nodes/tinkerforge/$UID/update' for new configs
        pushed by the database.

        Parameters
        ----------
        sensor: TinkerforgeSensor
            The brick or bricklet for which to fetch a config from the database

        Returns
        -------
        AsyncGenerator of dict
            A dict containing the configuration of the device
        """
        return stream.chain(
            stream.call(
                call_safely,
                "db_tinkerforge_sensors/get_config",
                "db_tinkerforge_sensors/status_update",
                sensor.device.uid,
            )
            | pipe.takewhile(lambda config: config is not None),
            stream.iterate(
                event_bus.subscribe(
                    f"nodes/tinkerforge/{sensor.device.uid}/update")),
        )
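
The chain/takewhile combination above means the database answer is only used while it is not None, after which the update subscription takes over. A toy illustration with literal values (no event bus, purely illustrative data) follows.

import asyncio
from aiostream import stream, pipe


async def main():
    # First source: a "database answer" that is cut off at the first None ...
    first = stream.iterate([{"a": 1}, None, {"ignored": True}]) | pipe.takewhile(
        lambda config: config is not None
    )
    # ... then the "subscription" source takes over.
    updates = stream.iterate([{"a": 2}, {"a": 3}])
    combined = stream.chain(first, updates)
    print(await (combined | pipe.list()))  # prints [{'a': 1}, {'a': 2}, {'a': 3}]


asyncio.run(main())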
Example #3
    def _stream_data(self, transport):
        """
        Fetch the configuration for the transport from the database, create the connected device and stream its
        data, logging whenever the transport connects or disconnects.
        """
        config_stream = (
            stream.chain(
                stream.call(
                    call_safely,
                    f"{self.__database_topic}/get_config",
                    f"{self.__database_topic}/status_update",
                    transport.uuid,
                ),
                stream.iterate(event_bus.subscribe(f"nodes/by_uuid/{transport.uuid}/add")),
            )
            | pipe.map(lambda config: self._create_device(transport, config))
            | pipe.starmap(lambda config, device: stream.empty() if device is None else device.stream_data(config))
            | pipe.switch()
            | context.pipe(
                transport,
                on_enter=lambda: logging.getLogger(__name__).info(
                    "Connected to %s at %s (%s).", transport.name, transport.uri, transport.label
                ),
                on_exit=lambda: logging.getLogger(__name__).info(
                    "Disconnected from %s at %s (%s).", transport.name, transport.uri, transport.label
                ),
            )
        )

        return config_stream
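
Here _create_device is presumably expected to return a (config, device) tuple, which starmap unpacks, while switch keeps only the data stream of the most recently created device. The toy sketch below shows the starmap/switch combination with made-up tuples and a fake device_data generator; none of these names are project code.

import asyncio
from aiostream import stream, pipe


async def device_data(config: str):
    """Stand-in for device.stream_data(config)."""
    for i in range(3):
        await asyncio.sleep(0.02)
        yield f"{config}: sample {i}"


async def main():
    pairs = stream.iterate([("cfg-1", "device-a"), ("cfg-2", "device-b")]) | pipe.spaceout(0.05)
    data = (
        pairs
        | pipe.starmap(lambda config, device: stream.iterate(device_data(config)))
        | pipe.switch()  # only the stream belonging to the latest (config, device) pair survives
    )
    async with data.stream() as streamer:
        async for item in streamer:
            print(item)


asyncio.run(main())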
Example #4
    async def producer(
        self, output_queue: asyncio.Queue[tuple[str, dict[str, str | float | int]]]
    ):
        """
        Grabs the output data from the event bus and pushes it to a worker queue,
        so that multiple workers can then publish it via MQTT.

        Parameters
        ----------
        output_queue: asyncio.Queue
            The queue onto which the data is pushed for the MQTT workers to consume.
        """
        event: DataEvent
        async for event in event_bus.subscribe("wamp/publish"):
            try:
                # Events are DataEvents
                topic = event.topic
                payload = {
                    "timestamp": event.timestamp,
                    "uuid": str(event.sender),
                    "sid": event.sid,
                    "value": event.value,
                    "unit": event.unit,
                }
            except Exception:  # pylint: disable=broad-except
                self.__logger.exception(
                    "Malformed data received. Dropping data: %s", event)
            else:
                output_queue.put_nowait((topic, payload))
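
A matching worker would drain the queue and hand each payload to the MQTT client. The sketch below is an assumption rather than project code: mqtt_client, its awaitable publish() method, and the JSON serialization are hypothetical stand-ins for whatever client the service actually uses.

import asyncio
import json


async def consumer(
    output_queue: asyncio.Queue[tuple[str, dict[str, str | float | int]]],
    mqtt_client,  # hypothetical: assumed to expose an awaitable publish(topic, payload)
) -> None:
    # Drain the queue filled by producer() and forward each payload.
    while True:
        topic, payload = await output_queue.get()
        try:
            await mqtt_client.publish(topic, json.dumps(payload))
        finally:
            output_queue.task_done()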
async def call_safely(topic: str, status_topic: str, *args: Any,
                      **kwargs: Any) -> Any:
    """
    Call a topic on the event bus. If the topic is not yet registered, subscribe to the status topic and wait for
    the data source to become available.

    Parameters
    ----------
    topic: str
        The data source topic
    status_topic: str
        The data source status update topic, which will be listened to if the source is not available
    *args: Any
        The arguments passed to the eventbus topic
    **kwargs: Any
        The keyword arguments passed to the eventbus topic

    Returns
    -------
    Any
        The result of the event_bus function call.
    """
    while "database not ready":
        try:
            result = await event_bus.call(topic, *args, **kwargs)
        except TopicNotRegisteredError:
            # The database is not yet ready, wait for it
            status: bool  # TODO: Replace with proper event
            async for status in event_bus.subscribe(status_topic):
                if status:
                    break
            continue
        else:
            return result
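
A possible call site, using the topic names from Example #2; fetch_sensor_config and uid are illustrative names only.

async def fetch_sensor_config(uid):
    # Blocks until the database side has registered the topic, then returns its answer.
    return await call_safely(
        "db_tinkerforge_sensors/get_config",
        "db_tinkerforge_sensors/status_update",
        uid,
    )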
Example #6
    def stream_data(self) -> AsyncGenerator[DataEvent, None]:
        """
        Generate the initial configuration of the sensor, configure it, and finally stream the data from the sensor.
        If there is a configuration update, reconfigure the sensor and start streaming again.

        Returns
        -------
        AsyncGenerator of DataEvent
            The data from the device
        """
        # Generates the first configuration
        # Query the database and if it does not have a config for the sensor, wait until there is one

        data_stream = (
            stream.chain(
                stream.call(
                    call_safely, "db_labnode_sensors/get_config", "db_labnode_sensors/status_update", self.__uuid
                )
                | pipe.takewhile(lambda config: config is not None),
                stream.iterate(event_bus.subscribe(f"nodes/by_uuid/{self.__uuid}/update")),
            )
            | pipe.action(
                lambda config: logging.getLogger(__name__).info(
                    "Got new configuration for: %s",
                    self._device,
                )
            )
            | pipe.map(self._create_config)
            | pipe.switchmap(
                lambda config: stream.empty()
                if config is None or not config["enabled"]
                else (self._configure_and_stream(config))
            )
        )

        return data_stream
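
The action stage above only logs as a side effect, while the switchmap stage maps disabled or missing configs to an empty stream. A toy demonstration of that pattern with made-up configs and no event bus is sketched below.

import asyncio
from aiostream import stream, pipe


async def main():
    configs = stream.iterate(
        [{"enabled": True, "id": 1}, {"enabled": False, "id": 2}]
    ) | pipe.spaceout(0.05)
    data = (
        configs
        | pipe.action(lambda config: print("got config", config["id"]))  # side effect only
        | pipe.switchmap(
            lambda config: stream.empty()
            if not config["enabled"]
            else stream.just(f"data from sensor {config['id']}")
        )
    )
    print(await (data | pipe.list()))  # prints ['data from sensor 1']


asyncio.run(main())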
Example #7
    def stream_data(self,
                    config: dict[str, Any]) -> AsyncGenerator[DataEvent, None]:
        """
        Stream the data from the sensor.

        Parameters
        ----------
        config: dict
            A dictionary containing the sensor configuration.

        Returns
        -------
        AsyncGenerator
            The asynchronous stream.
        """
        data_stream = (
            stream.chain(
                stream.just(config),
                stream.iterate(event_bus.subscribe(f"nodes/by_uuid/{self.__uuid}/update")),
            )
            | pipe.action(
                lambda _: logging.getLogger(__name__).info("Got new configuration for: %s", self)
                if config is not None
                else logging.getLogger(__name__).info("Removed configuration for: %s", self)
            )
            | pipe.map(self._parse_config)
            | pipe.switchmap(
                lambda conf: stream.empty()
                if conf is None or not conf["enabled"]
                else self._configure_and_stream(conf)
            )
        )

        return data_stream
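
A hypothetical caller might consume the returned stream as shown below; run_device, device, and the config values are illustrative assumptions, and the sketch presumes the return value is the aiostream pipeline built above.

async def run_device(device):
    config = {"enabled": True, "interval": 1.0}  # illustrative config only
    async with device.stream_data(config).stream() as data_events:
        async for event in data_events:
            print(event)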
Example #8
    def stream_data(self) -> AsyncGenerator[DataEvent, None]:
        """
        Generate the initial configuration of the sensor, configure it, and finally stream the data from the sensor.
        If there is a configuration update, reconfigure the sensor and start streaming again.

        Returns
        -------
        AsyncGenerator of DataEvent
            The data from the device
        """
        # Generates the first configuration
        # Query the database and if it does not have a config for the sensor, wait until there is one

        data_stream = stream.chain(
            stream.just(self),
            stream.iterate(event_bus.subscribe(f"nodes/tinkerforge/{self.device.uid}/remove"))[:1]
            | pipe.map(lambda x: None),
        ) | pipe.switchmap(
            lambda sensor: stream.empty()
            if sensor is None
            else (
                self._stream_config_updates(sensor)
                | pipe.switchmap(
                    lambda config: stream.chain(
                        stream.just(config),
                        stream.iterate(event_bus.subscribe(f"nodes/by_uuid/{config['uuid']}/remove"))[:1]
                        | pipe.map(lambda x: None),
                    )
                )
                | pipe.action(
                    lambda config: logging.getLogger(__name__).info(
                        "Got new configuration for: %s", sensor.device
                    )
                )
                | pipe.map(self._create_config)
                | pipe.switchmap(
                    lambda config: stream.empty()
                    if config is None or not config["enabled"]
                    else self._configure_and_stream(config)
                )
            )
        )

        return data_stream
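
The [:1] slice plus pipe.map(lambda x: None) turns the first removal event into a None sentinel, which the switchmap stage translates into an empty stream and thereby shuts the pipeline down. A self-contained sketch of that sentinel pattern with made-up names follows.

import asyncio
from aiostream import stream, pipe


async def removal_events():
    """Stand-in for the remove-topic subscription."""
    await asyncio.sleep(0.05)
    yield "remove"


async def main():
    source = stream.chain(
        stream.just("sensor-1"),
        stream.iterate(removal_events())[:1] | pipe.map(lambda _: None),
    )
    data = source | pipe.switchmap(
        lambda sensor: stream.empty()
        if sensor is None
        else stream.count(interval=0.01) | pipe.map(lambda i: f"{sensor}: reading {i}")
    )
    print(await (data | pipe.list()))  # a few readings, then the stream ends after the removal


asyncio.run(main())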
async def iterate_safely(topic: str, status_topic: str, *args: Any,
                         **kwargs: Any) -> AsyncGenerator[Any, None]:
    """
    Iterate over a topic on the event bus. If the topic is not yet registered, subscribe to the status topic and wait
    for the data source to become available.

    Parameters
    ----------
    topic: str
        The data source topic
    status_topic: str
        The data source status update topic, which will be listened to if the source is not available
    *args: Any
        The arguments passed to the eventbus topic
    **kwargs: Any
        The keyword arguments passed to the eventbus topic

    Yields
    ------
    Any
        The data returned from the subscription

    """
    while "database not ready":
        try:
            gen = await event_bus.call(topic, *args, **kwargs)
        except TopicNotRegisteredError:
            # The database is not yet ready, wait for it
            status: bool  # TODO: Replace with proper event
            async for status in event_bus.subscribe(status_topic):
                if status:
                    break
            continue
        else:
            async for item in gen:
                yield item
        break
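
A hypothetical consumer of iterate_safely, following the f"{topic}/get" / f"{topic}/status_update" pattern from Example #1; monitor_new_sensors is an illustrative name.

async def monitor_new_sensors(topic: str) -> None:
    async for uuid in iterate_safely(f"{topic}/get", f"{topic}/status_update"):
        print("New sensor announced:", uuid)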