Example no. 1
    def test_defaults(self):
        """
        Tests whether creating a SensedEvent with the minimum number of arguments causes unexpected errors
        or leads to a state that later causes exceptions to be raised when manipulating the event.
        :return:
        """
        ev = SensedEvent.from_json(self.minimal_event.to_json())
        self.assertEqual(ev, self.minimal_event)

        # Should be able to specify None values for the data (unary event) or source (anonymous event?)
        ev = SensedEvent(data=1, source=None)
        self.assertEqual(SensedEvent.from_json(ev.to_json()), ev)
        ev = SensedEvent(data=None, source='temperature')
        self.assertEqual(SensedEvent.from_json(ev.to_json()), ev)
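
The test above round-trips a SensedEvent through its JSON encoding and relies on equality to detect any loss. A minimal standalone sketch of that idiom follows; the import path for SensedEvent is an assumption and may differ from the actual package layout.

    # Hedged sketch of the to_json()/from_json() round trip exercised by test_defaults.
    # The module path below is assumed, not taken from the listing.
    from scale_client.core.sensed_event import SensedEvent

    ev = SensedEvent(data=22.5, source='temperature')   # same constructor arguments as the tests use
    encoded = ev.to_json()                               # JSON string representation of the event
    decoded = SensedEvent.from_json(encoded)             # rebuild the event from that string
    assert decoded == ev                                 # equality is what the tests assert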
Example no. 3
 def extract_event(request):
     """
     Extracts a SensedEvent from the payload of the request.  If the event was left as a local one,
     tries to convert it to a remote event by setting the host/port/protocol.
     :param request:
     :type request: Request
     :return: the SensedEvent
     :rtype: SensedEvent
     """
     event = SensedEvent.from_json(request.payload)
     host, port = request.source
     try:
         # TODO: specify coaps if this event came through an encrypted channel?
         networks.util.process_remote_event(event,
                                            hostname=host,
                                            port=port,
                                            protocol='coap')
         # save the local resource URI so we know where exactly it entered our local client
         event.metadata['local_resource_uri'] = uri.build_uri(
             relative_path=request.uri_path)
         # QUESTION: should we do something with uri_query?  probably not used in a PUT/POST request...
     except BaseException as e:
         log.error(
             "error during converting local source to remote source in event extracted from CoAP request: %s"
             % e)
     return event
 def test_basic_json_encoding(self):
     """
     Tests whether SensedEvent encoding/decoding works as expected.
     :return:
     """
     encoded = self.event.to_json()
     decoded = SensedEvent.from_json(encoded)
     self.assertEqual(self.event, decoded, "encoding then decoding an event should give an essentially identical one back! have: %s and %s" % (self.event, decoded))
Example no. 5
 def test_basic_json_encoding(self):
     """
     Tests whether SensedEvent encoding/decoding works as expected.
     :return:
     """
     encoded = self.event.to_json()
     decoded = SensedEvent.from_json(encoded)
     self.assertEqual(
         self.event, decoded,
         "encoding then decoding an event should give an essentially identical one back! have: %s and %s"
         % (self.event, decoded))
Example no. 6
    def test_json_encoding_excluded_fields(self):
        """
        Tests whether we can correctly encode a SensedEvent with the exclude_fields option and then correctly decode
        it back.
        :return:
        """

        encoded = self.event.to_json(exclude_fields=('schema', 'condition',
                                                     'misc', 'prio_value',
                                                     'prio_class'))
        decoded = SensedEvent.from_json(encoded)
        self.assertEqual(
            self.event, decoded,
            "encoding then decoding an event should give an essentially identical one back! have: %s and %s"
            % (self.event, decoded))

        # Now verify that excluding these fields at least allows us to decode the resulting encoded event, even though
        # we KNOW that they will not be truly equal.
        encoded = self.event.to_json(exclude_fields=('timestamp',
                                                     'event_type'))
        decoded = SensedEvent.from_json(encoded)  # should not raise error

        # Last, verify that excluding these fields DOES cause an error!
        encoded = self.event.to_json(exclude_fields=('device', ))
        with self.assertRaises(NotImplementedError):
            SensedEvent.from_json(encoded)
        encoded = self.event.to_json(exclude_fields=('value', ))
        with self.assertRaises(TypeError):
            SensedEvent.from_json(encoded)
 def make_event_with_raw_data(self, raw_data, priority=None):
     """
     This implementation assumes that the raw_data is a JSON-encoded SensedEvent already.
     :param raw_data:
     :param priority:
     :return:
     """
     # TODO: use priority? or log warning if someone tries to use it?
     try:
         ev = SensedEvent.from_json(raw_data)
         return ev
     except (ValueError, TypeError) as e:
         log.error("Failed to decode SensedEvent from: %s" % raw_data)
         raise e
    def _on_message(self, mqtt_client, payload, topic, qos, retain):
        """Publishes the SensedEvent internally upon receiving it"""

        try:
            event = SensedEvent.from_json(payload)
            # NOTE: we probably don't actually have to do this as its source should already be set,
            # but just in case we add additional handling later...
            process_remote_event(event)
        except BaseException as e:
            log.error("failed to parse SensedEvent from JSON payload: %s\nError was: %s" % (payload, e))
            return

        event.metadata['mqtt_topic'] = topic
        event.metadata['mqtt_broker'] = uri.build_uri(scheme='mqtt', path='broker', host=self._hostname, port=self._hostport)
        event.metadata['time_rcvd'] = SensedEvent.get_timestamp()
        self.publish(event)
        log.debug("MqttSensor received SensedEvent from topics %s: %s" % (topic, event))
 def extract_event(request):
     """
     Extracts a SensedEvent from the payload of the request.  If the event was left as a local one,
     tries to convert it to a remote event by setting the host/port/protocol.
     :param request:
     :type request: Request
     :return: the SensedEvent
     :rtype: SensedEvent
     """
     event = SensedEvent.from_json(request.payload)
     host, port = request.source
     try:
         # TODO: specify coaps if this event came through an encrypted channel?
         networks.util.process_remote_event(event, hostname=host, port=port, protocol='coap')
         # save the local resource URI so we know where exactly it entered our local client
         event.metadata['local_resource_uri'] = uri.build_uri(relative_path=request.uri_path)
         # QUESTION: should we do something with uri_query?  probably not used in a PUT/POST request...
     except BaseException as e:
         log.error("error during converting local source to remote source in event extracted from CoAP request: %s" % e)
     return event
    def test_alert_compression(self, n_sensors=20, n_quakes=10, expect_one_packet=False):
        """Tests the method for compressing the data in a seismic alert event so that it fits in a single CoAP packet.
        Note that this test doesn't actually verify that the packet isn't fragmented on the wire, but rather relies on
        the CoAP helper function to check if it's too big."""

        # IDEA: create pick events, pass them to server, get its aggregated alert event, compress that, and then verify
        # that it fits into a single CoAP packet and actually contains recent events

        events = self._generate_events(n_quakes, n_sensors, event_type=SEISMIC_PICK_TOPIC, time_incr=1)
        # NOTE: because of the varying event ID lengths and our attempt to quickly cut out some that are over capacity,
        # we add some more events with longer name lengths to verify that part works.  You can enable logging and add
        # print statements to the function to manually verify this, though automated tests with more than 2 different
        # event ID lengths would be much better.  WARNING: this test previously failed to catch a bug because of this!
        big_event_id = 'big_huge_accelerometer_thing%d'
        # use integer division so the sensor count stays an int under Python 3
        events.extend(self._generate_events(n_quakes, n_sensors // 4, event_type=SEISMIC_PICK_TOPIC, sensor_name=big_event_id))
        for e in events:
            self.srv.on_event(e, topic=e.topic)

        alert = self.srv.read()
        if not expect_one_packet:
            self.assertFalse(msg_fits_one_coap_packet(alert.to_json()), "alert msg already fits into one packet! add more pick events...")

        comp_alert = compress_alert_one_coap_packet(alert)
        self.assertTrue(msg_fits_one_coap_packet(comp_alert), "compressed alert data doesn't actually fit in one packet!")

        # double-check the contained events are newer ones
        decomp_alert = SensedEvent.from_json(comp_alert)
        # print "alert data was:", alert.data, "\nBut now is:", decomp_alert.data
        # print "seq #s after compression are:", [get_seq_from_event_id(eid) for eid in decomp_alert.data]
        self.assertIsInstance(decomp_alert.data, list)  # make sure it isn't just a single event string...
        self.assertTrue(any(get_seq_from_event_id(eid) == n_quakes - 1 for eid in decomp_alert.data))  # should keep newest

        # Don't run these checks for the tests that verify it works ok with only a few events
        if not expect_one_packet:
            self.assertGreaterEqual(len(decomp_alert.data), 5)  # where all the events at??
            self.assertTrue(all(get_seq_from_event_id(eid) > 0 for eid in decomp_alert.data))  # should throw out oldest
            # check to make sure we're using most of the packet.  Note that we assume here n_quakes < 1000
            self.assertGreater(len(comp_alert), COAP_MAX_PAYLOAD_SIZE - (len(big_event_id) + 4))
Example no. 11
    def _on_message(self, mqtt_client, payload, topic, qos, retain):
        """Publishes the SensedEvent internally upon receiving it"""

        try:
            event = SensedEvent.from_json(payload)
            # NOTE: we probably don't actually have to do this as its source should already be set,
            # but just in case we add additional handling later...
            process_remote_event(event)
        except BaseException as e:
            log.error(
                "failed to parse SensedEvent from JSON payload: %s\nError was: %s"
                % (payload, e))
            return

        event.metadata['mqtt_topic'] = topic
        event.metadata['mqtt_broker'] = uri.build_uri(scheme='mqtt',
                                                      path='broker',
                                                      host=self._hostname,
                                                      port=self._hostport)
        event.metadata['time_rcvd'] = SensedEvent.get_timestamp()
        self.publish(event)
        log.debug("MqttSensor received SensedEvent from topics %s: %s" %
                  (topic, event))
    def test_json_encoding_excluded_fields(self):
        """
        Tests whether we can correctly encode a SensedEvent with the exclude_fields option and then correctly decode
        it back.
        :return:
        """

        encoded = self.event.to_json(exclude_fields=('schema', 'condition', 'misc', 'prio_value', 'prio_class'))
        decoded = SensedEvent.from_json(encoded)
        self.assertEqual(self.event, decoded, "encoding then decoding an event should give an essentially identical one back! have: %s and %s" % (self.event, decoded))

        # Now verify that excluding these fields at least allows us to decode the resulting encoded event, even though
        # we KNOW that they will not be truly equal.
        encoded = self.event.to_json(exclude_fields=('timestamp', 'event_type'))
        decoded = SensedEvent.from_json(encoded)  # should not raise error

        # Last, verify that excluding these fields DOES cause an error!
        encoded = self.event.to_json(exclude_fields=('device',))
        with self.assertRaises(NotImplementedError):
            SensedEvent.from_json(encoded)
        encoded = self.event.to_json(exclude_fields=('value',))
        with self.assertRaises(TypeError):
            SensedEvent.from_json(encoded)
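
The assertions above pin down which fields are optional in the JSON encoding: optional ones can be excluded and the event still decodes to an equal object, while 'device' and 'value' cannot. A small standalone sketch of the same exclude_fields usage follows; the import path is an assumption, not taken from the listing.

    # Hedged sketch: dropping optional fields still round-trips to an equal event,
    # mirroring the first assertion in test_json_encoding_excluded_fields above.
    from scale_client.core.sensed_event import SensedEvent  # assumed import path

    ev = SensedEvent(data=42, source='temperature')
    slim = ev.to_json(exclude_fields=('misc', 'condition'))   # omit optional fields from the JSON
    assert SensedEvent.from_json(slim) == ev                   # defaults are restored on decode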