Example #1
    def __init__(self, sensor_log_system, stream_manager, model):
        self._logger = logging.getLogger(__name__)

        self._model = model

        self._sensor_log = sensor_log_system.storage
        self._allocate_id = sensor_log_system.allocate_id

        self._stream_manager = stream_manager
        self._rsl = sensor_log_system

        self.graph = SensorGraph(self._sensor_log, model=model, enforce_limits=True)

        self.persisted_exists = False
        self.persisted_nodes = []
        self.persisted_streamers = []
        self.persisted_constants = []

        self.streamer_acks = {}
        self.streamer_status = {}

        self.enabled = False

        # Clock manager linkage
        self.get_timestamp = lambda: 0
Example #2
    def compile(self, model):
        """Compile this file into a SensorGraph.

        You must have previously called parse_file to parse a
        sensor graph file into statements that are then executed
        by this method to build a sensor graph.

        The results are stored in self.sensor_graph and can be
        inspected before running optimization passes.

        Args:
            model (DeviceModel): The device model that we should compile
                this sensor graph for.
        """

        log = SensorLog(InMemoryStorageEngine(model), model)
        self.sensor_graph = SensorGraph(log, model)

        allocator = StreamAllocator(self.sensor_graph, model)

        self._scope_stack = []

        # Create a root scope
        root = RootScope(self.sensor_graph, allocator)
        self._scope_stack.append(root)

        for statement in self.statements:
            statement.execute(self.sensor_graph, self._scope_stack)

        self.sensor_graph.initialize_remaining_constants()
        self.sensor_graph.sort_nodes()
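
A minimal usage sketch for the call order this docstring describes (the import paths and the .sgf text are assumptions, not taken from the examples here): parse_file() fills self.statements, then compile() executes them into self.sensor_graph.

# Sketch only: import paths and the sgf snippet below are assumed for illustration.
from iotile.sg.model import DeviceModel
from iotile.sg.parser import SensorGraphFileParser

parser = SensorGraphFileParser()

# parse_file() accepts either a file path or the raw text via data=.
parser.parse_file(data="every 10 seconds\n{\n    call 0x8000 on controller;\n}\n")

# compile() executes the parsed statements and stores the result on parser.sensor_graph.
parser.compile(model=DeviceModel())
print(len(parser.sensor_graph.nodes))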
def basic_sg():
    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node('(system input 2 always) => unbuffered 1 using copy_all_a')

    return sg
def callrpc_sg():
    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node(
        '(system input 2 always && constant 1 always) => unbuffered 2 using call_rpc'
    )
    log.push(DataStream.FromString('constant 1'),
             IOTileReading(0, 0, 0x000a8000))

    return sg
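
basic_sg() and callrpc_sg() read like shared test fixtures; one way to register such constructors for reuse across tests (an assumption about how they are meant to be used, not shown here) is via pytest:

import pytest

@pytest.fixture
def basic_sg_fixture():
    # Hypothetical wrapper around the basic_sg() constructor defined above.
    return basic_sg()

def test_basic_sg_has_one_node(basic_sg_fixture):
    # The graph built by basic_sg() contains exactly the one node it adds.
    assert len(basic_sg_fixture.nodes) == 1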
def test_stream_allocation():
    """Make sure we can allocate DataStreams."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    # TODO Finish this function
    alloc = StreamAllocator(sg, model=model)

    stream1 = alloc.allocate_stream(DataStream.ConstantType)
    assert len(sg.nodes) == 0

    stream2 = alloc.attach_stream(stream1)
    assert len(sg.nodes) == 0

    stream3 = alloc.attach_stream(stream1)
    assert len(sg.nodes) == 0

    stream4 = alloc.attach_stream(stream1)
    assert len(sg.nodes) == 0

    stream5 = alloc.attach_stream(stream1)
    assert len(sg.nodes) == 1

    assert stream1 == stream2
    assert stream2 == stream3
    assert stream4 == stream1
    assert stream5 != stream1
Example #6
    def __init__(self,
                 sensor_log_system,
                 stream_manager,
                 model,
                 emulator,
                 executor=None):
        super(SensorGraphSubsystem, self).__init__(emulator)

        self._logger = logging.getLogger(__name__)

        self._model = model

        self._sensor_log = sensor_log_system.storage
        self._allocate_id = sensor_log_system.allocate_id
        self._inputs = emulator.create_queue(register=True)

        self._stream_manager = stream_manager
        self._rsl = sensor_log_system
        self._executor = executor

        self.graph = SensorGraph(self._sensor_log,
                                 model=model,
                                 enforce_limits=True)

        self.persisted_exists = False
        self.persisted_nodes = []
        self.persisted_streamers = []
        self.persisted_constants = []

        self.streamer_acks = {}
        self.streamer_status = {}

        self.enabled = False

        # Clock manager linkage
        self.get_timestamp = lambda: 0
def usertick_sg():
    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node('(system input 3 always) => counter 1 using copy_latest_a')
    sg.add_config(SlotIdentifier.FromString('controller'),
                  config_fast_tick_secs, 'uint32_t', 2)

    return sg
Example #8
def test_iteration():
    """Make sure we can iterate over the graph."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node('(input 1 always && input 2 when count >= 1) => unbuffered 1 using copy_all_a')
    sg.add_node('(input 1 always && input 3 when count >= 1) => unbuffered 2 using copy_all_a')
    sg.add_node('(unbuffered 2 always && unbuffered 1 always) => unbuffered 3 using copy_all_a')
    sg.add_node('(unbuffered 1 always) => unbuffered 3 using copy_all_a')

    iterator = sg.iterate_bfs()

    node1, in1, out1 = next(iterator)
    assert str(node1.stream) == u'unbuffered 1'
    assert len(in1) == 0
    assert len(out1) == 2
    assert str(out1[0].stream) == u'unbuffered 3'
    assert str(out1[1].stream) == u'unbuffered 3'

    node1, in1, out1 = next(iterator)
    assert str(node1.stream) == u'unbuffered 2'
    assert len(in1) == 0
    assert len(out1) == 1
    assert str(out1[0].stream) == u'unbuffered 3'

    node1, in1, out1 = next(iterator)
    assert str(node1.stream) == u'unbuffered 3'
    assert len(in1) == 2
    assert len(out1) == 0
    assert str(in1[0].stream) == u'unbuffered 2'
    assert str(in1[1].stream) == u'unbuffered 1'

    node1, in1, out1 = next(iterator)
    assert str(node1.stream) == u'unbuffered 3'
    assert len(in1) == 1
    assert len(out1) == 0
    assert str(in1[0].stream) == u'unbuffered 1'
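
The same iterator can be consumed generically; a small sketch (the helper name is chosen here for illustration) that prints each node's stream together with its connected input and output streams in BFS order:

def print_topology(sg):
    # iterate_bfs() yields (node, inputs, outputs) tuples, as exercised in the test above.
    for node, inputs, outputs in sg.iterate_bfs():
        in_streams = [str(x.stream) for x in inputs]
        out_streams = [str(x.stream) for x in outputs]
        print("%s: inputs=%s outputs=%s" % (node.stream, in_streams, out_streams))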
Example #9
def test_usertick():
    """Make sure we properly can set the user tick input."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    assert sg.get_tick('fast') == 0

    sg.add_config(SlotIdentifier.FromString('controller'), config_fast_tick_secs, 'uint32_t', 1)
    assert sg.get_tick('fast') == 1
Example #10
def tick2_sg():
    """A sensorgrah that listens to tick1."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node('(system input 6 always) => counter 1 using copy_latest_a')
    sg.add_config(SlotIdentifier.FromString('controller'), config_tick2_secs,
                  'uint32_t', 2)

    return sg
Example #11
def test_basic_sensorgraph():
    """Make sure we can parse, load and run a basic sensor graph."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node(
        '(input 1 always && input 2 when count >= 1) => unbuffered 1 using copy_all_a'
    )
    sg.process_input(DataStream.FromString('input 1'),
                     IOTileReading(0, 1, 1),
                     rpc_executor=None)
    sg.process_input(DataStream.FromString('input 2'),
                     IOTileReading(0, 1, 1),
                     rpc_executor=None)

    assert sg.sensor_log.inspect_last(
        DataStream.FromString('unbuffered 1')).value == 1
def test_string_generation():
    """Make sure we can print nodes."""

    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node(
        '(input 1 always && input 2 when count >= 1) => buffered node 1 using copy_all_a'
    )
    assert str(
        sg.nodes[-1]
    ) == u'(input 1 always && input 2 when count >= 1) => buffered 1 using copy_all_a'

    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)
    sg.add_node(
        '(input 1 when value < 0x10) => buffered node 1 using copy_all_a')
    assert str(sg.nodes[-1]
               ) == u'(input 1 when value < 16) => buffered 1 using copy_all_a'
Example #13
class SensorGraphFileParser:
    """A parser that builds a sensor graph object from a text file specification."""
    def __init__(self):
        self._scope_stack = []
        self.statements = []
        self.sensor_graph = None

    def dump_tree(self, statement=None, indent_level=0):
        """Dump the AST for this parsed file.

        Args:
            statement (SensorGraphStatement): the statement to print
                if this function is called recursively.
            indent_level (int): The number of spaces to indent this
                statement.  Used for recursively printing blocks of
                statements.
        Returns:
            str: The AST for this parsed sg file as a nested
                tree with one node per line and blocks indented.
        """

        out = u""

        indent = u" " * indent_level

        if statement is None:
            for root_statement in self.statements:
                out += self.dump_tree(root_statement, indent_level)
        else:
            out += indent + str(statement) + u'\n'

            if len(statement.children) > 0:
                for child in statement.children:
                    out += self.dump_tree(child, indent_level=indent_level + 4)

        return out

    def parse_file(self, sg_file=None, data=None):
        """Parse a sensor graph file into an AST describing the file.

        This function builds the statements list for this parser.
        If you pass ``sg_file``, it will be interpreted as the path to a file
        to parse.  If you pass ``data`` it will be directly interpreted as the
        string to parse.
        """

        if sg_file is not None and data is not None:
            raise ArgumentError(
                "You must pass either a path to an sgf file or the sgf contents but not both"
            )

        if sg_file is None and data is None:
            raise ArgumentError(
                "You must pass either a path to an sgf file or the sgf contents, neither passed"
            )

        if sg_file is not None:
            try:
                with open(sg_file, "r") as inf:
                    data = inf.read()
            except IOError:
                raise ArgumentError("Could not read sensor graph file",
                                    path=sg_file)

        # convert tabs to spaces so our line numbers match correctly
        data = data.replace(u'\t', u'    ')

        lang = get_language()
        result = lang.parseString(data)

        for statement in result:
            parsed = self.parse_statement(statement, orig_contents=data)
            self.statements.append(parsed)

    def compile(self, model):
        """Compile this file into a SensorGraph.

        You must have previously called parse_file to parse a
        sensor graph file into statements that are then executed
        by this method to build a sensor graph.

        The results are stored in self.sensor_graph and can be
        inspected before running optimization passes.

        Args:
            model (DeviceModel): The device model that we should compile
                this sensor graph for.
        """

        log = SensorLog(InMemoryStorageEngine(model), model)
        self.sensor_graph = SensorGraph(log, model)

        allocator = StreamAllocator(self.sensor_graph, model)

        self._scope_stack = []

        # Create a root scope
        root = RootScope(self.sensor_graph, allocator)
        self._scope_stack.append(root)

        for statement in self.statements:
            statement.execute(self.sensor_graph, self._scope_stack)

        self.sensor_graph.initialize_remaining_constants()
        self.sensor_graph.sort_nodes()

    def parse_statement(self, statement, orig_contents):
        """Parse a statement, possibly called recursively.

        Args:
            statement (int, ParseResult): The pyparsing parse result that
                contains one statement prepended with the match location
            orig_contents (str): The original contents of the file that we're
                parsing in case we need to convert an index into a line, column
                pair.

        Returns:
            SensorGraphStatement: The parsed statement.
        """

        children = []
        is_block = False
        name = statement.getName()

        # Recursively parse all child statements in a block
        # before parsing the block itself.
        # If this is a non-block statement, parse it using the statement
        # parser to figure out what specific statement it is before
        # processing it further.
        # This two-step process produces better syntax error messages.
        if name == 'block':
            children_statements = statement[1]
            for child in children_statements:
                parsed = self.parse_statement(child,
                                              orig_contents=orig_contents)
                children.append(parsed)

            locn = statement[0]['location']
            statement = statement[0][1]
            name = statement.getName()
            is_block = True
        else:
            stmt_language = get_statement()
            locn = statement['location']
            statement = statement['match']
            statement_string = str(u"".join(statement.asList()))

            # Try to parse this generic statement into an actual statement.
            # Do this here in a separate step so we have good error messages when there
            # is a problem parsing a step.
            try:
                statement = stmt_language.parseString(statement_string)[0]
            except (pyparsing.ParseException,
                    pyparsing.ParseSyntaxException) as exc:
                raise SensorGraphSyntaxError(
                    "Error parsing statement in sensor graph file",
                    message=exc.msg,
                    line=pyparsing.line(locn, orig_contents).strip(),
                    line_number=pyparsing.lineno(locn, orig_contents),
                    column=pyparsing.col(locn, orig_contents))
            except SensorGraphSemanticError as exc:
                # Reraise semantic errors with line information
                raise SensorGraphSemanticError(
                    exc.msg,
                    line=pyparsing.line(locn, orig_contents).strip(),
                    line_number=pyparsing.lineno(locn, orig_contents),
                    **exc.params)

            name = statement.getName()

        if name not in statement_map:
            raise ArgumentError("Unknown statement in sensor graph file",
                                parsed_statement=statement,
                                name=name)

        # Save off our location information so we can give good error and warning information
        line = pyparsing.line(locn, orig_contents).strip()
        line_number = pyparsing.lineno(locn, orig_contents)
        column = pyparsing.col(locn, orig_contents)
        location_info = LocationInfo(line, line_number, column)

        if is_block:
            return statement_map[name](statement,
                                       children=children,
                                       location=location_info)

        return statement_map[name](statement, location_info)
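
Before compiling, the parsed AST can be inspected with dump_tree(); a short sketch under the same assumptions as the earlier parser sketch (import path and .sgf text are illustrative):

from iotile.sg.parser import SensorGraphFileParser  # assumed import path

parser = SensorGraphFileParser()
parser.parse_file(data="every 10 seconds\n{\n    call 0x8000 on controller;\n}\n")

# dump_tree() renders one statement per line, indenting each block's children by 4 spaces.
print(parser.dump_tree())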
Example #14
class SensorGraphSubsystem(object):
    """Container for sensor graph state.

    There is a distinction between the sensor graph saved into persistent
    storage and the one currently loaded and running.  This subsystem needs
    to be created with a mutex shared with the sensor_log subsystem to make
    sure all accesses are properly synchronized.
    """

    def __init__(self, sensor_log_system, stream_manager, model):
        self._logger = logging.getLogger(__name__)

        self._model = model

        self._sensor_log = sensor_log_system.storage
        self._allocate_id = sensor_log_system.allocate_id

        self._stream_manager = stream_manager
        self._rsl = sensor_log_system

        self.graph = SensorGraph(self._sensor_log, model=model, enforce_limits=True)

        self.persisted_exists = False
        self.persisted_nodes = []
        self.persisted_streamers = []
        self.persisted_constants = []

        self.streamer_acks = {}
        self.streamer_status = {}

        self.enabled = False

        # Clock manager linkage
        self.get_timestamp = lambda: 0

    def clear_to_reset(self, _config_vars):
        """Clear all volatile information across a reset.

        The reset behavior is:
        - any persisted sensor_graph is loaded
        - if a persisted graph is found, enabled is set to True
        - if a persisted graph is found, reset readings are pushed
          into it.
        """

        self.graph.clear()

        if not self.persisted_exists:
            return

        for node in self.persisted_nodes:
            self.graph.add_node(node)

        for streamer_desc in self.persisted_streamers:
            streamer = streamer_descriptor.parse_string_descriptor(streamer_desc)
            self.graph.add_streamer(streamer)

        # Load in the constants
        for stream, reading in self.persisted_constants:
            self._sensor_log.push(stream, reading)

        self.enabled = True

        # Set up all streamers
        for index, value in viewitems(self.streamer_acks):
            self._seek_streamer(index, value)

        #FIXME: queue sending reset readings

    def process_input(self, encoded_stream, value):
        """Process or drop a graph input.

        This must not be called directly from an RPC but always via a deferred
        task.
        """

        if not self.enabled:
            return

        stream = DataStream.FromEncoded(encoded_stream)
        reading = IOTileReading(self.get_timestamp(), encoded_stream, value)

        self.graph.process_input(stream, reading, None)  #FIXME: add in an rpc executor for this device.

        self.process_streamers()

    def _seek_streamer(self, index, value):
        """Complex logic for actually seeking a streamer to a reading_id.

        This routine hides all of the gnarly logic of the various edge cases.
        In particular, the behavior depends on whether the reading id is found,
        and if it is found, whether it belongs to the indicated streamer or not.

        If not, the behavior depends on whether the sought reading id is too
        high or too low.
        """

        highest_id = self._rsl.highest_stored_id()

        streamer = self.graph.streamers[index]
        if not streamer.walker.buffered:
            return _pack_sgerror(SensorLogError.CANNOT_USE_UNBUFFERED_STREAM)

        find_type = None
        try:
            exact = streamer.walker.seek(value, target='id')
            if exact:
                find_type = 'exact'
            else:
                find_type = 'other_stream'

        except UnresolvedIdentifierError:
            if value > highest_id:
                find_type = 'too_high'
            else:
                find_type = 'too_low'

        # If we found an exact match, move one beyond it

        if find_type == 'exact':
            try:
                streamer.walker.pop()
            except StreamEmptyError:
                pass

            error = Error.NO_ERROR
        elif find_type == 'too_high':
            streamer.walker.skip_all()
            error = _pack_sgerror(SensorLogError.NO_MORE_READINGS)
        elif find_type == 'too_low':
            streamer.walker.seek(0, target='offset')
            error = _pack_sgerror(SensorLogError.NO_MORE_READINGS)
        else:
            error = _pack_sgerror(SensorLogError.ID_FOUND_FOR_ANOTHER_STREAM)

        return error

    def acknowledge_streamer(self, index, ack, force):
        """Acknowledge a streamer value as received from the remote side."""

        if index >= len(self.graph.streamers):
            return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)

        old_ack = self.streamer_acks.get(index, 0)

        if ack != 0:
            if ack <= old_ack and not force:
                return _pack_sgerror(SensorGraphError.OLD_ACKNOWLEDGE_UPDATE)

            self.streamer_acks[index] = ack

        current_ack = self.streamer_acks.get(index, 0)
        return self._seek_streamer(index, current_ack)

    def _handle_streamer_finished(self, index, succeeded, highest_ack):
        """Callback when a streamer finishes processing."""

        self._logger.debug("Rolling back streamer %d after streaming, highest ack from streaming subsystem was %d", index, highest_ack)
        self.acknowledge_streamer(index, highest_ack, False)

    def process_streamers(self):
        """Check if any streamers should be handed to the stream manager."""

        # Check for any triggered streamers and pass them to stream manager
        in_progress = self._stream_manager.in_progress()
        triggered = self.graph.check_streamers(blacklist=in_progress)

        for streamer in triggered:
            self._stream_manager.process_streamer(streamer, callback=self._handle_streamer_finished)

    def trigger_streamer(self, index):
        """Pass a streamer to the stream manager if it has data."""

        self._logger.debug("trigger_streamer RPC called on streamer %d", index)

        if index >= len(self.graph.streamers):
            return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)

        if index in self._stream_manager.in_progress():
            return _pack_sgerror(SensorGraphError.STREAM_ALREADY_IN_PROGRESS)

        streamer = self.graph.streamers[index]
        if not streamer.triggered(manual=True):
            return _pack_sgerror(SensorGraphError.STREAMER_HAS_NO_NEW_DATA)

        self._logger.debug("calling mark_streamer on streamer %d from trigger_streamer RPC", index)
        self.graph.mark_streamer(index)

        self.process_streamers()

        return Error.NO_ERROR

    def count_nodes(self):
        """Count the number of nodes."""

        return len(self.graph.nodes)

    def persist(self):
        """Trigger saving the current sensorgraph to persistent storage."""

        self.persisted_nodes = self.graph.dump_nodes()
        self.persisted_streamers = self.graph.dump_streamers()
        self.persisted_exists = True
        self.persisted_constants = self._sensor_log.dump_constants()

    def reset(self):
        """Clear the sensorgraph from RAM and flash."""

        self.persisted_exists = False
        self.persisted_nodes = []
        self.persisted_streamers = []
        self.persisted_constants = []
        self.graph.clear()

        self.streamer_status = {}

    def add_node(self, binary_descriptor):
        """Add a node to the sensor_graph using a binary node descriptor.

        Args:
            binary_descriptor (bytes): An encoded binary node descriptor.

        Returns:
            int: A packed error code.
        """

        try:
            node_string = parse_binary_descriptor(binary_descriptor)
        except Exception:
            self._logger.exception("Error parsing binary node descriptor: %s", binary_descriptor)
            return _pack_sgerror(SensorGraphError.INVALID_NODE_STREAM)  # FIXME: Actually provide the correct error codes here

        try:
            self.graph.add_node(node_string)
        except NodeConnectionError:
            return _pack_sgerror(SensorGraphError.STREAM_NOT_IN_USE)
        except ProcessingFunctionError:
            return _pack_sgerror(SensorGraphError.INVALID_PROCESSING_FUNCTION)
        except ResourceUsageError:
            return _pack_sgerror(SensorGraphError.NO_NODE_SPACE_AVAILABLE)

        return Error.NO_ERROR

    def add_streamer(self, binary_descriptor):
        """Add a streamer to the sensor_graph using a binary streamer descriptor.

        Args:
            binary_descriptor (bytes): An encoded binary streamer descriptor.

        Returns:
            int: A packed error code
        """

        streamer = streamer_descriptor.parse_binary_descriptor(binary_descriptor)

        try:
            self.graph.add_streamer(streamer)
            self.streamer_status[len(self.graph.streamers) - 1] = StreamerStatus()

            return Error.NO_ERROR
        except ResourceUsageError:
            return _pack_sgerror(SensorGraphError.NO_MORE_STREAMER_RESOURCES)

    def inspect_streamer(self, index):
        """Inspect the streamer at the given index."""

        if index >= len(self.graph.streamers):
            return [_pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED), b'\0'*14]

        return [Error.NO_ERROR, streamer_descriptor.create_binary_descriptor(self.graph.streamers[index])]

    def inspect_node(self, index):
        """Inspect the graph node at the given index."""

        if index >= len(self.graph.nodes):
            raise RPCErrorCode(6)  #FIXME: use actual error code here for UNKNOWN_ERROR status

        return create_binary_descriptor(str(self.graph.nodes[index]))

    def query_streamer(self, index):
        """Query the status of the streamer at the given index."""

        if index >= len(self.graph.streamers):
            return None

        info = self.streamer_status[index]
        highest_ack = self.streamer_acks.get(index, 0)

        return [info.last_attempt_time, info.last_success_time, info.last_error, highest_ack, info.last_status, info.attempt_number, info.comm_status]
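
add_node() and inspect_node() above convert between binary node descriptors and the string form used throughout these examples; a small round-trip sketch of that conversion on its own (the import path is an assumption):

# Assumed import path for the descriptor helpers used by add_node()/inspect_node() above.
from iotile.sg.node_descriptor import parse_binary_descriptor, create_binary_descriptor

node_string = '(system input 2 always) => unbuffered 1 using copy_all_a'

binary = create_binary_descriptor(node_string)   # encode to the binary form add_node() accepts
round_trip = parse_binary_descriptor(binary)     # decode back to a string node descriptor

print(round_trip)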
Example #15
def test_triggering_streamers():
    model = DeviceModel()
    log = SensorLog(model=model)
    sg = SensorGraph(log, model=model)

    sg.add_node('(input 1 always) => output 1 using copy_all_a')
    sg.add_node('(input 1 always) => output 2 using copy_all_a')

    sg.add_streamer(parse_string_descriptor('streamer on output 1'))
    sg.add_streamer(
        parse_string_descriptor('manual streamer on output 2 with streamer 0'))

    triggered = sg.check_streamers()
    assert len(triggered) == 0

    sg.process_input(DataStream.FromString('input 1'),
                     IOTileReading(0, 1, 1),
                     rpc_executor=None)
    triggered = sg.check_streamers()
    assert len(triggered) == 2