Example #1
 async def stream_state_down(switches):
     """ Executes the async behavior to be verified. """
     out_queue = janus.Queue()
     sink = AsyncStreamToQueue(cast(asyncio.Queue, out_queue.async_q))
     # Holds tuples of Janus in-queues & pipes
     down_pipes = []
     for switch in switches:
         topic = topic_name(switch.get_node().topic_prefix(),
                            TOPIC_DIRECTION_DOWNSTREAM)
         logger.debug("Topic: %s", topic)
         in_queue = janus.Queue()
         source = AsyncStreamFromQueue(
             cast(asyncio.Queue, in_queue.async_q))
         stream = (source
                   | Op.map(lambda msg, sw=switch: {sw.device_id: msg})
                   | Op.map(lambda msg, sw=switch: sw.get_node().coder.
                            encode(msg))
                   | Op.map(lambda msg, sw=switch, tp=topic: {
                       'topic': tp,
                       'message': msg
                   }))
         pipe = MessagePipe(source, stream, sink)
         source.start()
         down_pipes.append((in_queue, pipe))
     down_pipes[0][0].sync_q.put({'state': OnOffState.ON})
     down_pipes[1][0].sync_q.put({'state': OnOffState.ON})
     down_pipes[2][0].sync_q.put({'state': OnOffState.ON})
     down_pipes[0][0].sync_q.put({'state': OnOffState.OFF})
     await asyncio.gather(down_pipes[0][1].source.queue.join(),
                          down_pipes[1][1].source.queue.join(),
                          down_pipes[2][1].source.queue.join())
     for pipe in down_pipes:
         pipe[1].source.stop()
     return out_queue
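A driving test for the coroutine above might look like the following sketch; the switches fixture, the event-loop handling and the expected message count are illustrative assumptions, not taken from the project:

    def test_stream_state_down(switches):
        """ Run the coroutine and drain the shared sink queue. """
        loop = asyncio.get_event_loop()
        out_queue = loop.run_until_complete(stream_state_down(switches))
        results = []
        # Four messages were fed in (three ON, one OFF), so four encoded
        # messages should have reached the shared sink.
        while out_queue.sync_q.qsize() > 0:
            results.append(out_queue.sync_q.get())
            out_queue.sync_q.task_done()
        assert len(results) == 4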
Example #2
 async def stream_state_up(switch, state):
     """ Async execution part of test. """
     in_queue = janus.Queue()
     source = AsyncStreamFromQueue(cast(asyncio.Queue,
                                        in_queue.async_q))
     out_queue = janus.Queue()
     sink = AsyncStreamToQueue(cast(asyncio.Queue, out_queue.async_q))
     topic = topic_name(switch.get_node().topic_prefix(),
                        TOPIC_DIRECTION_DOWNSTREAM)
     stream = (
         source
         | Op.map(lambda msg, sw=switch: ({
             sw.device_id: msg
         }, logger.debug("Map device ID %s: %s", sw.device_id, msg))[0])
          | Op.map(lambda msg, sw=switch: sw.get_node().coder.encode(msg))
         | Op.map(lambda msg, sw=switch, tp=topic: {
             'topic': tp,
             'message': msg
         }))
     # noinspection PyUnusedLocal
     message_stream = MessagePipe(source, stream, sink)
     source.start()
     in_queue.sync_q.put(state)
     await in_queue.async_q.join()
     source.stop()
     return out_queue
Example #3
    def from_blueprint(self, plant: DevicePlant, blueprint: Dict[str, Any], owner: CompoundDevice = None) \
            -> DeviceAssembly:
        # noinspection DuplicatedCode
        """ Given a plan dictionary as above, construct the device.

            The upstream is constructed with the following operations:
                1. The main data stream is filtered for messages on the switch's parent node's upstream topic;
                2. Then, messages are decoded into a dictionary format from, e.g., JSON;
                3. The resulting dictionary is further filtered by this device's ID (to separate it from any other
                   devices addressed in the same message);
                4. Then, the message payload is extracted;
                5. Finally, the message state is set via a tap.
            At its end, the upstream flow presents an Observable for use by clients. This flow contains just
            messages from the specific device.

            The downstream is constructed with the following operations:
                1. The input payload is put in a dictionary with the device ID as the key.
                2. The result is encoded with the device's coder.
                3. A dictionary with the topic and the encoded message is created.

            :param plant:     The device plant the assembly is being added to.
            :param blueprint: A blueprint in the form of the dictionary above.
            :param owner:     The owner (usually the node) for this device.
            :returns:         A device bundle with the BasicSwitch device object and the up/downstream data
                              sources/sinks.

            TODO: Currently just handles state. Add configuration too.
        """
        state = self.create_state(blueprint['config'])
        device = Sensor(blueprint['device_id'], state)
        device.owner_device = owner
        upstream_ops = (
            plant.upstream_source
            | Op.filter(lambda msg, dev=device: msg['topic'].startswith(
                dev.get_node().topic_prefix()))
            | Op.map(lambda msg, dev=device:
                     dev.get_node().coder.decode(msg['message']))
            | Op.filter(lambda msg_dict, dev=device: dev.device_id in msg_dict)
            | Op.map(lambda msg_dict, dev=device: msg_dict[dev.device_id])
            | Op.tap(lambda dev_msg, dev=device:
                     dev.state.set_value(dev_msg['state'])))
        downstream_source = AsyncStreamFromQueue(asyncio.Queue())
        downstream_ops = (
            downstream_source
            | Op.map(lambda msg, dev=device: {dev.device_id: msg})
            | Op.map(lambda msg, dev=device: dev.get_node().coder.encode(msg))
            | Op.map(lambda msg, dev=device: {
                'topic': topic_name(dev.get_node().topic_prefix(),
                                    TOPIC_DIRECTION_DOWNSTREAM),
                'message': msg
            }))
        return self.assemble(plant, device, upstream_ops, downstream_source,
                             downstream_ops)
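The docstring above lists three downstream operations; traced by hand they transform a payload roughly as follows. The device ID, the topic and the use of JSON as the coder are illustrative assumptions:

    import json

    payload = {'state': 'ON'}          # value put on the downstream source
    keyed = {'sensor_1': payload}      # 1. keyed by the (assumed) device ID
    encoded = json.dumps(keyed)        # 2. encoded by the device's coder (JSON assumed)
    message = {                        # 3. wrapped with the downstream topic
        'topic': 'hausnet/vendorname_sensor/ABC012/downstream',
        'message': encoded
    }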
Example #4
 async def send_message(node: NodeDevice):
     in_queue = janus.Queue()
     source = AsyncStreamFromQueue(cast(asyncio.Queue,
                                        in_queue.async_q))
     up_streams = []
     for (key, device) in node.sub_devices.items():
         # Stream operations:
         #   1. Only forward messages on topics belonging to the node
         #   2. Decode the message from JSON into a dictionary
          #   3. Only forward messages that are intended (at least in part) for the device
         #   4. Pick out the part of the message intended for the device (each root key represents a device)
         #   5. Tap the stream to store new device state values.
         up_stream = (
             source
             | Op.filter(lambda msg: msg['topic'].startswith(
                 node.topic_prefix()))
             | Op.map(lambda msg: node.coder.decode(msg['message']))
             | Op.filter(lambda msg_dict, device_id=device.device_id:
                         device_id in msg_dict)
             | Op.map(lambda msg_dict, device_id=device.device_id:
                      msg_dict[device_id])
             | Op.tap(lambda dev_msg, dev=device: dev.state.set_value(
                 dev_msg['state'])))
         up_streams.append(
             MessagePipe(source, up_stream,
                         AsyncStreamToQueue(asyncio.Queue())))
     messages = [
         {
             'topic': 'hausnet/vendorname_switch/ABC012/upstream',
             'message': '{"switch_1": {"state": "OFF"}}'
         },
         {
             'topic': 'hausnet/vendorname_switch/ABC012/upstream',
             'message': '{"switch_2": {"state": "ON"}}'
         },
         {
             'topic': 'hausnet/vendorname_switch/ABC012/upstream',
             'message': '{"switch_1": {"state": "UNDEFINED"}}'
         },
     ]
     for message in messages:
         in_queue.sync_q.put(message)
     source.start()
     await source.queue.join()
     source.stop()
     messages = []
     for stream in up_streams:
         while stream.sink.queue.qsize() > 0:
             messages.append(await stream.sink.queue.get())
             stream.sink.queue.task_done()
     return messages
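A test driving the coroutine above could look like the sketch below; the node fixture and the exact contents of the drained messages depend on that fixture's sub-devices, so both are assumptions rather than project code:

    def test_send_message(node):
        """ Run the coroutine and check that the published payloads came through. """
        loop = asyncio.get_event_loop()
        results = loop.run_until_complete(send_message(node))
        # With sub-devices switch_1 and switch_2 present, each published
        # message is routed to exactly one device sink.
        assert len(results) == 3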
Example #5
 async def stream_through_pipe() -> janus.Queue:
     """ Stream queue values through a mapping operation to an output queue. """
     in_q = janus.Queue()
     in_q.sync_q.put_nowait(1)
     in_q.sync_q.put_nowait(2)
     out_q = janus.Queue()
     sink = AsyncStreamToQueue(cast(asyncio.Queue, out_q.async_q))
     src = AsyncStreamFromQueue(cast(asyncio.Queue, in_q.async_q))
     stream = (src
               | Op.map(lambda msg: 10 * msg)
               | Op.map(lambda msg: 3 * msg))
     pipe = MessagePipe(src, stream, sink)
     pipe.source.start()
     await in_q.async_q.join()
     pipe.source.stop()
     return out_q
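Run under an event loop, the pipe above maps each input through *10 and then *3, so the inputs 1 and 2 should come out as 30 and 60. A minimal driver (the test-function name is illustrative):

    def test_stream_through_pipe():
        loop = asyncio.get_event_loop()
        out_q = loop.run_until_complete(stream_through_pipe())
        # 1 -> 10 -> 30 and 2 -> 20 -> 60, in input order.
        assert out_q.sync_q.get() == 30
        assert out_q.sync_q.get() == 60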
Example #6
    def from_blueprint(self, plant: DevicePlant, blueprint: Dict[str, Any], owner: CompoundDevice = None) \
            -> DeviceAssembly:
        """ Given a plan dictionary as above, construct the device. The operations on the input (MQTT) data
            stream are:
                1. The main data stream is filtered for messages on the node's upstream topic;
                2. Then, messages are decoded into a dictionary format from, e.g., JSON;
                3. Messages are filtered so only those addressed to the node itself are passed through.
            At its end, the upstream flow presents an Observable for use by clients. This flow contains just messages
            from this node.

            :param plant:     The device plant the assembly is being added to.
            :param blueprint: A blueprint in the form of the dictionary above.
            :param owner:     Owning device, usually None for a NodeDevice.
            :returns:         The device assembly for a node.

            TODO: Deal with module configuration messages
            TODO: Common first part of upstream & last of downstream - worth making generic? E.g. the topic name can
                  be derived; it need not be specified per device.
            TODO: DRY failure? Stream ops for all devices of the same type should be the same?
        """
        device = NodeDevice(blueprint['device_id'])
        upstream_ops = (
            plant.upstream_source
            | Op.filter(lambda msg, dev=device: msg['topic'].startswith(
                dev.topic_prefix()))
            | Op.map(lambda msg, dev=device: dev.coder.decode(msg['message']))
            | Op.filter(lambda msg_dict, dev=device: dev.device_id in msg_dict))
        downstream_source = AsyncStreamFromQueue(asyncio.Queue())
        downstream_ops = (
            downstream_source
            | Op.map(lambda msg, dev=device: {dev.device_id: msg})
            | Op.map(lambda msg, dev=device: dev.get_node().coder.encode(msg))
            | Op.map(lambda msg, dev=device: {
                'topic': f'{dev.get_node().topic_prefix()}{TOPIC_DOWNSTREAM_APPENDIX}',
                'message': msg
            }))
        return self.assemble(plant, device, upstream_ops, downstream_source,
                             downstream_ops)
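The upstream operations listed in the docstring above reduce a raw MQTT message to just the payload addressed to the node. Traced by hand with illustrative values (the topic, the node ID used as a key, and the JSON coder are all assumptions):

    import json

    raw = {
        'topic': 'hausnet/vendorname_switch/ABC012/upstream',  # 1. matches the node's topic prefix
        'message': '{"ABC012": {"state": "ON"}}'
    }
    decoded = json.loads(raw['message'])                        # 2. decoded from JSON into a dict
    assert 'ABC012' in decoded                                  # 3. passed on: addressed to the node itself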
Example #7
 async def decode_json(msg):
     in_queue = janus.Queue()
     source = AsyncStreamFromQueue(cast(asyncio.Queue,
                                        in_queue.async_q))
     # Stream operations:
     #   1. Only forward messages on topics belonging to the node
     #   2. Decode the message from JSON into a dictionary
     stream = (
         source
          | Op.filter(lambda x: x['topic'].startswith(node.topic_prefix()))
         | Op.map(lambda x: node.coder.decode(x['message'])))
     up_stream = MessagePipe(source, stream,
                             AsyncStreamToQueue(asyncio.Queue()))
     in_queue.sync_q.put(msg)
     source.start()
     decoded = await up_stream.sink.queue.get()
     up_stream.sink.queue.task_done()
     await source.queue.join()
     source.stop()
     return decoded
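The helper above would be awaited from an async test, with a message shaped like the upstream messages in Example #4. The topic and payload here are illustrative and assume the enclosing test's node fixture matches the topic prefix:

    async def check_decode():
        decoded = await decode_json({
            'topic': 'hausnet/vendorname_switch/ABC012/upstream',
            'message': '{"switch_1": {"state": "ON"}}'
        })
        # The pipe strips the MQTT envelope and returns the decoded payload,
        # i.e. {'switch_1': {'state': 'ON'}} for the message above.
        assert decoded == {'switch_1': {'state': 'ON'}}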