# Assumed imports for these examples: the module paths follow the iotile-core and
# iotile-sensorgraph packages and may need adjusting for a given installation.
from iotile.core.hw.reports import IOTileReading
from iotile.sg import DataStream, DeviceModel, SensorGraph, SensorLog, SlotIdentifier
from iotile.sg.known_constants import config_fast_tick_secs, config_tick2_secs
from iotile.sg.streamer_descriptor import parse_string_descriptor


def basic_sg():
    """A sensorgraph that copies system input 2 into unbuffered 1."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node('(system input 2 always) => unbuffered 1 using copy_all_a')

    return sg


def usertick_sg():
    """A sensorgraph that copies system input 3 into counter 1 with a 2 second fast tick."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node('(system input 3 always) => counter 1 using copy_latest_a')
    sg.add_config(SlotIdentifier.FromString('controller'),
                  config_fast_tick_secs, 'uint32_t', 2)

    return sg


def callrpc_sg():
    """A sensorgraph that calls the RPC encoded in constant 1 whenever system input 2 fires."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node(
        '(system input 2 always && constant 1 always) => unbuffered 2 using call_rpc'
    )
    log.push(DataStream.FromString('constant 1'),
             IOTileReading(0, 0, 0x000a8000))

    return sg


def tick2_sg():
    """A sensorgraph that listens to tick2."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node('(system input 6 always) => counter 1 using copy_latest_a')
    sg.add_config(SlotIdentifier.FromString('controller'), config_tick2_secs,
                  'uint32_t', 2)

    return sg
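

# A minimal usage sketch (not part of the original tests): it exercises basic_sg()
# the same way test_basic_sensorgraph below exercises its graph.  The stream id and
# value passed to IOTileReading are illustrative only.
def demo_basic_sg():
    """Push one reading through basic_sg() and inspect the copied output."""

    sg = basic_sg()

    # The node in basic_sg() copies 'system input 2' into 'unbuffered 1'.
    sg.process_input(DataStream.FromString('system input 2'),
                     IOTileReading(0, 2, 42), rpc_executor=None)

    assert sg.sensor_log.inspect_last(DataStream.FromString('unbuffered 1')).value == 42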
def test_basic_sensorgraph():
    """Make sure we can parse, load and run a basic sensor graph."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node('(input 1 always && input 2 when count >= 1) => unbuffered 1 using copy_all_a')
    sg.process_input(DataStream.FromString('input 1'), IOTileReading(0, 1, 1), rpc_executor=None)
    sg.process_input(DataStream.FromString('input 2'), IOTileReading(0, 1, 1), rpc_executor=None)

    assert sg.sensor_log.inspect_last(DataStream.FromString('unbuffered 1')).value == 1


def test_string_generation():
    """Make sure we can print nodes."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node(
        '(input 1 always && input 2 when count >= 1) => buffered node 1 using copy_all_a'
    )
    assert str(sg.nodes[-1]) == u'(input 1 always && input 2 when count >= 1) => buffered 1 using copy_all_a'

    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)
    sg.add_node(
        '(input 1 when value < 0x10) => buffered node 1 using copy_all_a')
    assert str(sg.nodes[-1]) == u'(input 1 when value < 16) => buffered 1 using copy_all_a'


def test_triggering_streamers():
    """Make sure streamers trigger once new readings reach their watched output streams."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node('(input 1 always) => output 1 using copy_all_a')
    sg.add_node('(input 1 always) => output 2 using copy_all_a')

    sg.add_streamer(parse_string_descriptor('streamer on output 1'))
    sg.add_streamer(
        parse_string_descriptor('manual streamer on output 2 with streamer 0'))

    triggered = sg.check_streamers()
    assert len(triggered) == 0

    sg.process_input(DataStream.FromString('input 1'),
                     IOTileReading(0, 1, 1),
                     rpc_executor=None)
    triggered = sg.check_streamers()
    assert len(triggered) == 2


def test_iteration():
    """Make sure we can iterate over the graph."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node(
        '(input 1 always && input 2 when count >= 1) => unbuffered 1 using copy_all_a'
    )
    sg.add_node(
        '(input 1 always && input 3 when count >= 1) => unbuffered 2 using copy_all_a'
    )
    sg.add_node(
        '(unbuffered 2 always && unbuffered 1 always) => unbuffered 3 using copy_all_a'
    )
    sg.add_node('(unbuffered 1 always) => unbuffered 3 using copy_all_a')

    iterator = sg.iterate_bfs()

    node1, in1, out1 = next(iterator)
    assert str(node1.stream) == u'unbuffered 1'
    assert len(in1) == 0
    assert len(out1) == 2
    assert str(out1[0].stream) == u'unbuffered 3'
    assert str(out1[1].stream) == u'unbuffered 3'

    node1, in1, out1 = next(iterator)
    assert str(node1.stream) == u'unbuffered 2'
    assert len(in1) == 0
    assert len(out1) == 1
    assert str(out1[0].stream) == u'unbuffered 3'

    node1, in1, out1 = next(iterator)
    assert str(node1.stream) == u'unbuffered 3'
    assert len(in1) == 2
    assert len(out1) == 0
    assert str(in1[0].stream) == u'unbuffered 2'
    assert str(in1[1].stream) == u'unbuffered 1'

    node1, in1, out1 = next(iterator)
    assert str(node1.stream) == u'unbuffered 3'
    assert len(in1) == 1
    assert len(out1) == 0
    assert str(in1[0].stream) == u'unbuffered 1'


class SensorGraphSubsystem(object):
    """Container for sensor graph state.

    There is a distinction between the sensor graph that is saved into
    persistent storage and the one that is currently loaded and running.  This
    subsystem must be created with a mutex shared with the sensor_log
    subsystem so that all accesses are properly synchronized.  (A construction
    sketch with stand-in collaborators appears after this class.)
    """

    def __init__(self, sensor_log_system, stream_manager, model):
        self._logger = logging.getLogger(__name__)

        self._model = model

        self._sensor_log = sensor_log_system.storage
        self._allocate_id = sensor_log_system.allocate_id

        self._stream_manager = stream_manager
        self._rsl = sensor_log_system

        self.graph = SensorGraph(self._sensor_log, model=model, enforce_limits=True)

        self.persisted_exists = False
        self.persisted_nodes = []
        self.persisted_streamers = []
        self.persisted_constants = []

        self.streamer_acks = {}
        self.streamer_status = {}

        self.enabled = False

        # Clock manager linkage
        self.get_timestamp = lambda: 0

    def clear_to_reset(self, _config_vars):
        """Clear all volatile information across a reset.

        The reset behavior is that:
        - any persisted sensor_graph is loaded
        - if there is a persisted graph found, enabled is set to True
        - if there is a persisted graph found, reset readings are pushed
          into it.
        """

        self.graph.clear()

        if not self.persisted_exists:
            return

        for node in self.persisted_nodes:
            self.graph.add_node(node)

        for streamer_desc in self.persisted_streamers:
            streamer = streamer_descriptor.parse_string_descriptor(streamer_desc)
            self.graph.add_streamer(streamer)

        # Load in the constants
        for stream, reading in self.persisted_constants:
            self._sensor_log.push(stream, reading)

        self.enabled = True

        # Set up all streamers
        for index, value in viewitems(self.streamer_acks):
            self._seek_streamer(index, value)

        #FIXME: queue sending reset readings

    def process_input(self, encoded_stream, value):
        """Process or drop a graph input.

        This must not be called directly from an RPC but always via a deferred
        task.
        """

        if not self.enabled:
            return

        stream = DataStream.FromEncoded(encoded_stream)
        reading = IOTileReading(self.get_timestamp(), encoded_stream, value)

        self.graph.process_input(stream, reading, None)  #FIXME: add in an rpc executor for this device.

        self.process_streamers()

    def _seek_streamer(self, index, value):
        """Complex logic for actually seeking a streamer to a reading_id.

        This routine hides all of the gnarly logic of the various edge cases.
        In particular, the behavior depends on whether the reading id is found,
        and if it is found, whether it belongs to the indicated streamer or not.

        If the reading id is not found at all, the behavior depends on whether
        the sought id is too high or too low.
        """

        highest_id = self._rsl.highest_stored_id()

        streamer = self.graph.streamers[index]
        if not streamer.walker.buffered:
            return _pack_sgerror(SensorLogError.CANNOT_USE_UNBUFFERED_STREAM)

        find_type = None
        try:
            exact = streamer.walker.seek(value, target='id')
            if exact:
                find_type = 'exact'
            else:
                find_type = 'other_stream'

        except UnresolvedIdentifierError:
            if value > highest_id:
                find_type = 'too_high'
            else:
                find_type = 'too_low'

        # If we found an exact match, move one beyond it

        if find_type == 'exact':
            try:
                streamer.walker.pop()
            except StreamEmptyError:
                pass

            error = Error.NO_ERROR
        elif find_type == 'too_high':
            streamer.walker.skip_all()
            error = _pack_sgerror(SensorLogError.NO_MORE_READINGS)
        elif find_type == 'too_low':
            streamer.walker.seek(0, target='offset')
            error = _pack_sgerror(SensorLogError.NO_MORE_READINGS)
        else:
            error = _pack_sgerror(SensorLogError.ID_FOUND_FOR_ANOTHER_STREAM)

        return error

    def acknowledge_streamer(self, index, ack, force):
        """Acknowledge a streamer value as received from the remote side."""

        if index >= len(self.graph.streamers):
            return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)

        old_ack = self.streamer_acks.get(index, 0)

        if ack != 0:
            if ack <= old_ack and not force:
                return _pack_sgerror(SensorGraphError.OLD_ACKNOWLEDGE_UPDATE)

            self.streamer_acks[index] = ack

        current_ack = self.streamer_acks.get(index, 0)
        return self._seek_streamer(index, current_ack)

    def _handle_streamer_finished(self, index, succeeded, highest_ack):
        """Callback when a streamer finishes processing."""

        self._logger.debug("Rolling back streamer %d after streaming, highest ack from streaming subsystem was %d", index, highest_ack)
        self.acknowledge_streamer(index, highest_ack, False)

    def process_streamers(self):
        """Check if any streamers should be handed to the stream manager."""

        # Check for any triggered streamers and pass them to stream manager
        in_progress = self._stream_manager.in_progress()
        triggered = self.graph.check_streamers(blacklist=in_progress)

        for streamer in triggered:
            self._stream_manager.process_streamer(streamer, callback=self._handle_streamer_finished)

    def trigger_streamer(self, index):
        """Pass a streamer to the stream manager if it has data."""

        self._logger.debug("trigger_streamer RPC called on streamer %d", index)

        if index >= len(self.graph.streamers):
            return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)

        if index in self._stream_manager.in_progress():
            return _pack_sgerror(SensorGraphError.STREAM_ALREADY_IN_PROGRESS)

        streamer = self.graph.streamers[index]
        if not streamer.triggered(manual=True):
            return _pack_sgerror(SensorGraphError.STREAMER_HAS_NO_NEW_DATA)

        self._logger.debug("calling mark_streamer on streamer %d from trigger_streamer RPC", index)
        self.graph.mark_streamer(index)

        self.process_streamers()

        return Error.NO_ERROR

    def count_nodes(self):
        """Count the number of nodes."""

        return len(self.graph.nodes)

    def persist(self):
        """Trigger saving the current sensorgraph to persistent storage."""

        self.persisted_nodes = self.graph.dump_nodes()
        self.persisted_streamers = self.graph.dump_streamers()
        self.persisted_exists = True
        self.persisted_constants = self._sensor_log.dump_constants()

    def reset(self):
        """Clear the sensorgraph from RAM and flash."""

        self.persisted_exists = False
        self.persisted_nodes = []
        self.persisted_streamers = []
        self.persisted_constants = []
        self.graph.clear()

        self.streamer_status = {}

    def add_node(self, binary_descriptor):
        """Add a node to the sensor_graph using a binary node descriptor.

        Args:
            binary_descriptor (bytes): An encoded binary node descriptor.

        Returns:
            int: A packed error code.
        """

        try:
            node_string = parse_binary_descriptor(binary_descriptor)
        except Exception:
            self._logger.exception("Error parsing binary node descriptor: %s", binary_descriptor)
            return _pack_sgerror(SensorGraphError.INVALID_NODE_STREAM)  # FIXME: Actually provide the correct error codes here

        try:
            self.graph.add_node(node_string)
        except NodeConnectionError:
            return _pack_sgerror(SensorGraphError.STREAM_NOT_IN_USE)
        except ProcessingFunctionError:
            return _pack_sgerror(SensorGraphError.INVALID_PROCESSING_FUNCTION)
        except ResourceUsageError:
            return _pack_sgerror(SensorGraphError.NO_NODE_SPACE_AVAILABLE)

        return Error.NO_ERROR

    def add_streamer(self, binary_descriptor):
        """Add a streamer to the sensor_graph using a binary streamer descriptor.

        Args:
            binary_descriptor (bytes): An encoded binary streamer descriptor.

        Returns:
            int: A packed error code
        """

        streamer = streamer_descriptor.parse_binary_descriptor(binary_descriptor)

        try:
            self.graph.add_streamer(streamer)
            self.streamer_status[len(self.graph.streamers) - 1] = StreamerStatus()

            return Error.NO_ERROR
        except ResourceUsageError:
            return _pack_sgerror(SensorGraphError.NO_MORE_STREAMER_RESOURCES)

    def inspect_streamer(self, index):
        """Inspect the streamer at the given index."""

        if index >= len(self.graph.streamers):
            return [_pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED), b'\0'*14]

        return [Error.NO_ERROR, streamer_descriptor.create_binary_descriptor(self.graph.streamers[index])]

    def inspect_node(self, index):
        """Inspect the graph node at the given index."""

        if index >= len(self.graph.nodes):
            raise RPCErrorCode(6)  #FIXME: use actual error code here for UNKNOWN_ERROR status

        return create_binary_descriptor(str(self.graph.nodes[index]))

    def query_streamer(self, index):
        """Query the status of the streamer at the given index."""

        if index >= len(self.graph.streamers):
            return None

        info = self.streamer_status[index]
        highest_ack = self.streamer_acks.get(index, 0)

        return [info.last_attempt_time, info.last_success_time, info.last_error, highest_ack, info.last_status, info.attempt_number, info.comm_status]
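

# A construction sketch for the subsystem above (not from the original module).
# The two fakes implement only the attributes this class actually touches:
# .storage, .allocate_id and .highest_stored_id on the sensor_log system, and
# .in_progress and .process_streamer on the stream manager.
class _FakeStreamManager(object):
    def in_progress(self):
        return set()

    def process_streamer(self, streamer, callback=None):
        pass


class _FakeSensorLogSystem(object):
    def __init__(self, model):
        self.storage = SensorLog(model=model)

    def allocate_id(self):
        return 1

    def highest_stored_id(self):
        return 0


def demo_sensor_graph_subsystem():
    """Build the subsystem with stand-ins, add a node, and verify it is counted."""

    model = DeviceModel()
    subsystem = SensorGraphSubsystem(_FakeSensorLogSystem(model), _FakeStreamManager(), model)

    subsystem.graph.add_node('(system input 2 always) => unbuffered 1 using copy_all_a')
    assert subsystem.count_nodes() == 1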