Example no. 1
    @classmethod
    def get_sensed_events_from_publications(cls,
                                            publications,
                                            init_time=None,
                                            source=None,
                                            metadata=None):
        """
        Converts the output from generate_publications() into SensedEvents
        :param publications: iterable of publications, as returned by generate_publications()
        :param init_time: base timestamp; each publication's relative time is added onto it to build a complete timestamp (defaults to now)
        :param source: optional source to set in the SensedEvent
        :param metadata: optional metadata to set
        :rtype: generator[SensedEvent]
        :return: a generator yielding one SensedEvent per publication
        """

        if init_time is None:
            init_time = SensedEvent.get_timestamp()

        for pub in publications:
            init_time += pub.time
            yield SensedEvent(pub.data,
                              source=source,
                              event_type=pub.topic,
                              timestamp=init_time,
                              metadata=metadata)
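A brief usage sketch for the generator above, in the same style as these snippets (project imports omitted). The Publication stand-in, the show_usage helper, and the generator_cls parameter are illustrative assumptions; only the classmethod's signature and the timestamp accumulation come from the code itself:

from collections import namedtuple

# Stand-in for objects yielded by generate_publications(): a payload, a topic, and a
# time offset relative to the previous publication (attribute names taken from the code above).
Publication = namedtuple('Publication', 'data topic time')

def show_usage(generator_cls):
    # generator_cls: whichever class actually defines get_sensed_events_from_publications
    pubs = [Publication(data=70.3, topic='temperature', time=0.5),
            Publication(data=71.1, topic='temperature', time=0.5)]
    events = list(generator_cls.get_sensed_events_from_publications(pubs))
    # The relative offsets accumulate onto init_time, so timestamps strictly increase.
    assert events[0].timestamp < events[1].timestamp
    return events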
Example no. 2
def process_remote_event(event,
                         protocol=None,
                         hostname=None,
                         port=None,
                         relay_uri=None):
    """
    Process a SensedEvent from a remote node by:
    1) ensuring its source points to the remote that created it,
    2) saving the time this event was received (right now)
    3) optionally storing the relay_uri we received this event from, e.g. a resource URI (if different from
        the remote URI), a broker URI, etc.
    NOTE: if relay_uri is specified but the other parameters are not, they'll be extracted from relay_uri, so
        you'll need to set either relay_uri or at least hostname/port!
    :param event: the SensedEvent to process
    :type event: scale_client.core.SensedEvent
    :param hostname: the remote host
    :param port: remote port from which the event came
    :param relay_uri: optional URI specifying the remote entity from which this event just came
        (e.g. broker or CoAP resource) as opposed to the entity that originally created it
    :param protocol: name of the protocol to include in the URI
    """

    # If the event isn't already formatted as coming from a legitimate remote source (i.e. the remote forgot to
    # convert the source), tag it as coming from the specified remote so we don't interpret it as a local one
    # and e.g. send it back there.
    if event.is_local or not uri.is_host_known(event.source):
        # try to extract unspecified parameters
        parsed_relay_uri = uri.parse_uri(
            relay_uri) if relay_uri is not None else None
        if parsed_relay_uri and not hostname:
            hostname = parsed_relay_uri.host
        if parsed_relay_uri and not port:
            port = parsed_relay_uri.port
        if parsed_relay_uri and not protocol:
            protocol = parsed_relay_uri.getscheme()

        # verify we have enough information to proceed
        if not hostname or not (port or protocol):
            raise ValueError(
                "failed to specify enough fields to at least identify protocol/port and host!"
                " host=%s, port=%s, protocol=%s, relay_uri=%s" %
                (hostname, port, protocol, relay_uri))

        # ENHANCE: perhaps we want to allow remote to specify a different protocol without knowing its IP address?
        # ENHANCE: perhaps we should do some validation as some networks could make this a problem e.g. a NAT
        event.source = uri.get_remote_uri(event.source,
                                          protocol=protocol,
                                          host=hostname,
                                          port=port)

    # Assume the receive time is right now:
    # NOTE: if the event was relayed to us by an intermediary, we deliberately overwrite any time_rcvd it already set!
    event.metadata['time_rcvd'] = SensedEvent.get_timestamp()

    # In case the remote's original URI differs from the one we received this event through (e.g. the CoAP resource):
    if relay_uri and relay_uri != event.source:
        event.metadata.setdefault('relay_uri', relay_uri)
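A sketch of the relay_uri convenience described in the docstring: passing only relay_uri lets hostname, port and protocol be filled in from it before the event's source is rewritten. The CoAP URI and the tag_received_event helper name are illustrative assumptions; the call signature and metadata keys come from the code above:

def tag_received_event(event):
    # event: a SensedEvent freshly parsed from a remote payload, e.g. SensedEvent.from_json(payload),
    # whose source hasn't yet been rewritten to point at the remote that created it
    process_remote_event(event, relay_uri='coap://10.1.2.3:5683/sensors/temperature')
    # hostname, port and protocol were extracted from relay_uri, so event.source now names the remote;
    # the receive time is recorded, and relay_uri is kept if it differs from the rewritten source
    return event.source, event.metadata['time_rcvd'], event.metadata.get('relay_uri')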
Example no. 3
    def test_nevents(self):

        # FIRST TEST: basic periodic publishing with limited # events
        # NOTE: make sure you bound the number of events generated or the SCALE client won't stop running!
        gen_cfg = dict(topic='fire', publication_period=.5, nevents=10)

        client = ScaleClient(quit_time=self.quit_time, raise_errors=True)
        broker = client.setup_broker()
        pub = RandomVirtualSensor(broker, event_generator=gen_cfg)
        stats_sub = StatisticsApplication(broker,
                                          subscriptions=('fire', 'ice'))
        events_sub = EventStoringApplication(broker,
                                             subscriptions=('fire', 'ice'))

        # get time of start and end; ensure all events have increasing timestamps between these values
        start_time = SensedEvent.get_timestamp()
        client.run()
        end_time = SensedEvent.get_timestamp()

        # verify # events generated
        self.assertEqual(stats_sub.get_stats('fire', 'count'), 10)
        self.assertGreater(len(events_sub.events), 0)

        for ev in events_sub.events:
            self.assertLess(start_time, ev.timestamp)
            self.assertLess(ev.timestamp, end_time)

        ## EDGE CASE: no events generated with 0 total events

        gen_cfg = dict(topic='fire', publication_period=.5, nevents=0)

        client = ScaleClient(quit_time=self.quit_time, raise_errors=True)
        broker = client.setup_broker()
        pub = RandomVirtualSensor(broker, event_generator=gen_cfg)
        stats_sub = StatisticsApplication(broker,
                                          subscriptions=('fire', 'ice'))
        client.run()

        # verify no events generated
        self.assertEqual(stats_sub.get_stats('fire', 'count'), 0)
Example no. 4
    def _on_message(self, mqtt_client, payload, topic, qos, retain):
        """Publishes the SensedEvent internally upon receiving it"""

        try:
            event = SensedEvent.from_json(payload)
            # NOTE: we probably don't actually have to do this as its source should already be set,
            # but just in case we add additional handling later...
            process_remote_event(event)
        except BaseException as e:
            log.error("failed to parse SensedEvent from JSON payload: %s\nError was: %s" % (payload, e))
            return

        event.metadata['mqtt_topic'] = topic
        event.metadata['mqtt_broker'] = uri.build_uri(scheme='mqtt', path='broker', host=self._hostname, port=self._hostport)
        event.metadata['time_rcvd'] = SensedEvent.get_timestamp()
        self.publish(event)
        log.debug("MqttSensor received SensedEvent from topic %s: %s" % (topic, event))
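The handler's signature doesn't match paho-mqtt's on_message callback directly. If the underlying client were paho (an assumption, since the snippet doesn't show which MQTT library backs mqtt_client), a thin adapter along these lines would bridge the two:

import paho.mqtt.client as mqtt

def make_paho_adapter(sensor):
    # paho delivers (client, userdata, message); unpack the message into the handler's signature
    def on_message(client, userdata, msg):
        sensor._on_message(client, msg.payload, msg.topic, msg.qos, msg.retain)
    return on_message

mqtt_client = mqtt.Client()
# mqtt_client.on_message = make_paho_adapter(my_sensor)  # my_sensor: a hypothetical MqttSensor instance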
Example no. 5
    def test_async(self):

        # THIRD TEST: asynchronous events, which we verify worked by setting appropriate bounds on pub times
        duration = 5
        gen_cfg = dict(topic='fire',
                       publication_period=dict(dist='exp',
                                               args=(0.5, ),
                                               lbound=0.5,
                                               ubound=1),
                       total_time=duration)

        client = ScaleClient(quit_time=duration + 1, raise_errors=True)
        broker = client.setup_broker()
        pub = RandomVirtualSensor(broker, event_generator=gen_cfg)
        stats_sub = StatisticsApplication(broker,
                                          subscriptions=('fire', 'ice'))
        events_sub = EventStoringApplication(broker,
                                             subscriptions=('fire', 'ice'))

        # get time of start and end; ensure all events have increasing timestamps between these values
        start_time = SensedEvent.get_timestamp()
        client.run()

        # verify expected # events generated
        self.assertGreaterEqual(stats_sub.get_stats('fire', 'count'), 4)
        self.assertLessEqual(stats_sub.get_stats('fire', 'count'), 11)

        # Verify times are as expected
        # ENHANCE: how to do this automatically?
        print "MANUALLY verify these pub times look async:"
        ev_times = [ev.timestamp - start_time for ev in events_sub.events]
        for i in range(len(ev_times))[1:]:
            ev_times[i] -= ev_times[i - 1]
        print ev_times

        last_time = events_sub.events[0].timestamp
        for ev in events_sub.events[1:]:
            self.assertLess(start_time, ev.timestamp)
            # give a little lag time for the last event
            self.assertLess(ev.timestamp, start_time + duration + 0.2)
            # ensure time diff is within our bounds (roughly)
            this_time = ev.timestamp
            self.assertLess(this_time - last_time, 1.1)
            self.assertGreater(this_time - last_time, 0.5)
            last_time = this_time
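For quick reference, the event_generator configuration keys exercised across these tests, gathered into one dict. The key names and value shapes come straight from the tests; the inline interpretations are inferred from how the assertions use them:

gen_cfg = dict(
    topic='fire',                 # topic the RandomVirtualSensor publishes on
    publication_period=0.5,       # fixed period in seconds between publications...
    # ...or a distribution spec whose draws appear bounded to [lbound, ubound] seconds:
    # publication_period=dict(dist='exp', args=(0.5,), lbound=0.5, ubound=1),
    nevents=10,                   # stop after this many events
    # total_time=5,               # alternatively, stop publishing after this many seconds
)
# NOTE: bound the run with nevents or total_time (or the client's quit_time),
# otherwise the SCALE client won't stop running (see test_nevents above).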
Example no. 6
def process_remote_event(event, protocol=None, hostname=None, port=None, relay_uri=None):
    """
    Process a SensedEvent from a remote node by:
    1) ensuring its source points to the remote that created it,
    2) saving the time this event was received (right now)
    3) optionally storing the relay_uri we received this event from, e.g. a resource URI (if different from the remote URI), a broker URI, etc.
    NOTE: if relay_uri is specified but the other parameters are not, they'll be extracted from relay_uri, so
        you'll need to set either relay_uri or at least hostname/port!
    :param event: the SensedEvent to process
    :type event: scale_client.core.SensedEvent
    :param hostname: the remote host
    :param port: remote port from which the event came
    :param relay_uri: optional URI specifying the remote entity from which this event just came
        (e.g. broker or CoAP resource) as opposed to the entity that originally created it
    :param protocol: name of the protocol to include in the URI
    """

    # If the event isn't already formatted as coming from a legitimate remote source (i.e. the remote forgot to convert
    # the source), tag it as coming from the specified remote so we don't interpret it as a local one and e.g. send it back there.
    if event.is_local or not uri.is_host_known(event.source):
        # try to extract unspecified parameters
        parsed_relay_uri = uri.parse_uri(relay_uri) if relay_uri is not None else None
        if parsed_relay_uri and not hostname:
            hostname = parsed_relay_uri.host
        if parsed_relay_uri and not port:
            port = parsed_relay_uri.port
        if parsed_relay_uri and not protocol:
            protocol = parsed_relay_uri.getscheme()

        # verify we have enough information to proceed
        if not hostname or not (port or protocol):
            raise ValueError("failed to specify enough fields to at least identify protocol/port and host!"
                             " host=%s, port=%s, protocol=%s, relay_uri=%s" % (hostname, port, protocol, relay_uri))

        # ENHANCE: perhaps we want to allow remote to specify a different protocol without knowing its IP address?
        # ENHANCE: perhaps we should do some validation as some networks could make this a problem e.g. a NAT
        event.source = uri.get_remote_uri(event.source, protocol=protocol, host=hostname, port=port)

    # Assume the receive time is right now:
    # NOTE: if the event was relayed to us by an intermediary, we deliberately overwrite any time_rcvd it already set!
    event.metadata['time_rcvd'] = SensedEvent.get_timestamp()

    # In case the remote's original URI differs from the one we received this event through (e.g. the CoAP resource):
    if relay_uri and relay_uri != event.source:
        event.metadata.setdefault('relay_uri', relay_uri)
Example no. 7
    def test_total_time(self):

        # SECOND TEST: limited time duration of published events
        duration = 3
        gen_cfg = dict(topic='fire',
                       publication_period=.5,
                       total_time=duration)

        client = ScaleClient(quit_time=self.quit_time, raise_errors=True)
        broker = client.setup_broker()
        pub = RandomVirtualSensor(broker, event_generator=gen_cfg)
        stats_sub = StatisticsApplication(broker,
                                          subscriptions=('fire', 'ice'))
        events_sub = EventStoringApplication(broker,
                                             subscriptions=('fire', 'ice'))

        # get time of start and end; ensure all events have increasing timestamps between these values
        start_time = SensedEvent.get_timestamp()
        client.run()

        # verify SOME events generated
        self.assertGreater(stats_sub.get_stats('fire', 'count'), 3)

        # Verify times are as expected
        for ev in events_sub.events:
            self.assertLess(start_time, ev.timestamp)
            # give a little lag time for the last event
            self.assertLess(ev.timestamp, start_time + duration + 0.2)

        ## EDGE CASE: no events generated with 0 time covered

        gen_cfg = dict(topic='fire', publication_period=.5, total_time=0)

        client = ScaleClient(quit_time=self.quit_time, raise_errors=True)
        broker = client.setup_broker()
        pub = RandomVirtualSensor(broker, event_generator=gen_cfg)
        stats_sub = StatisticsApplication(broker,
                                          subscriptions=('fire', 'ice'))
        client.run()

        # verify no events generated
        self.assertEqual(stats_sub.get_stats('fire', 'count'), 0)
Example no. 8
    def _on_message(self, mqtt_client, payload, topic, qos, retain):
        """Publishes the SensedEvent internally upon receiving it"""

        try:
            event = SensedEvent.from_json(payload)
            # NOTE: we probably don't actually have to do this as its source should already be set,
            # but just in case we add additional handling later...
            process_remote_event(event)
        except BaseException as e:
            log.error(
                "failed to parse SensedEvent from JSON payload: %s\nError was: %s"
                % (payload, e))
            return

        event.metadata['mqtt_topic'] = topic
        event.metadata['mqtt_broker'] = uri.build_uri(scheme='mqtt',
                                                      path='broker',
                                                      host=self._hostname,
                                                      port=self._hostport)
        event.metadata['time_rcvd'] = SensedEvent.get_timestamp()
        self.publish(event)
        log.debug("MqttSensor received SensedEvent from topic %s: %s" %
                  (topic, event))