@classmethod
def get_sensed_events_from_publications(cls, publications, init_time=None, source=None, metadata=None):
    """
    Converts the output from generate_publications() into SensedEvents.

    :param publications: iterable of publications, each with .time (relative delay), .topic, and .data
    :param init_time: the base timestamp to which each publication's relative time is added
        to create a complete timestamp (default is now)
    :param source: optional source to set in the SensedEvent
    :param metadata: optional metadata to set
    :rtype: generator[SensedEvent]
    :return: a generator of SensedEvents, one per publication
    """
    if init_time is None:
        init_time = SensedEvent.get_timestamp()
    for pub in publications:
        # publication times are relative delays, so accumulate them onto the base timestamp
        init_time += pub.time
        yield SensedEvent(pub.data, source=source, event_type=pub.topic,
                          timestamp=init_time, metadata=metadata)
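# Usage sketch: the real publication type isn't shown in this snippet, so we
# assume a simple namedtuple with time/topic/data fields, and 'EventGenerator'
# is a hypothetical name for the enclosing class.
from collections import namedtuple

Publication = namedtuple('Publication', 'time topic data')  # assumed shape

pubs = [Publication(time=0.5, topic='temperature', data=22.5),
        Publication(time=0.5, topic='temperature', data=23.1)]
# Timestamps accumulate: base + 0.5, base + 1.0, ...
for ev in EventGenerator.get_sensed_events_from_publications(pubs, source='temp0'):
    print ev.timestamp, ev.event_type, ev.data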
def publish_neighbors_average_temp(self):
    # TODO: all of this should be replaced with a proper call to creating a SensedEvent (using the newer API)
    data = {}
    data['event'] = 'MeshSensor'
    data['event_type'] = 'average_temperature'
    data['value'] = self.relayedSensedEvents['temperature']['neighbors_average']
    data['detail'] = {}
    data['detail']['temp_count'] = self.relayedSensedEvents['temperature']['neighbors_counter']
    data['detail']['temp_sum'] = round(self.relayedSensedEvents['temperature']['neighbors_sum'])
    try:
        encoded_data = json.dumps(data)
        event = SensedEvent(data, data['event'], 5)
        self.publish(event)
        log.info('Published neighbors average temperature to application. Data: ' + encoded_data)
        return True
    except Exception:
        log.error('Invalid average temperature encoded data string')
        return False
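# A sketch of what the TODO above might look like against the newer keyword
# API used elsewhere in this section; the exact argument mapping is an
# assumption, not the project's confirmed replacement.
def make_average_temp_event(value, count, total):
    return SensedEvent(data={'value': value,
                             'detail': {'temp_count': count, 'temp_sum': round(total)}},
                       source='MeshSensor', event_type='average_temperature', priority=5)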
def _cron(self):
    if self._db is None:
        if not self._try_connect():
            return

    # Clean up
    if self._clean_timer is None or self._clean_timer + self._clean_timeout < time.time():
        self._clean_up()
        self._clean_timer = time.time()

    # Check for available publishers
    # Will not check for Internet access
    if not self._puba:
        log.info("no available publisher reported")
        return

    res_list = None
    id_list = []
    event_list = []
    self._db_lock.acquire()
    try:
        # "== None" is intentional here: peewee overloads == to build an IS NULL clause
        res_list = self.EventRecord.select().where(self.EventRecord.upload_time == None)
        for rec in res_list:
            event = SensedEvent(data=json.loads(rec.value_json),
                                source=rec.sensor,
                                priority=rec.priority,
                                event_type=rec.event,
                                timestamp=rec.timestamp,
                                condition=json.loads(rec.condition) if rec.condition else None,
                                location=json.loads(rec.geotag) if rec.geotag else None,
                                metadata={"table_id": rec.id, "upload_time": rec.upload_time})
            id_list.append(rec.id)
            event_list.append(event)
    except peewee.OperationalError as err:
        log.error(str(err))
        self._db = None
        # don't leak the lock when bailing out early
        self._db_lock.release()
        return
def test_schema_versions(self):
    """
    Tests whether events formatted from different schema versions are compatible with the
    current data model.

    :return:
    """
    source_device = "scale-local:scale/devices/temperature"
    v1_map = {"d": {"event": "temperature",
                    "value": 55.5,
                    "units": "celsius",
                    "timestamp": 12345678,
                    "device": source_device,
                    "location": {"lat": 33.3, "lon": "-71"},
                    "condition": {"threshold": {"operator": ">", "value": "95"}},
                    "prio_class": "high",
                    "prio_value": 2,
                    "schema": "www.schema.org/scale_sensors.1.0.whatever",
                    }
              }

    v1_event = SensedEvent.from_map(v1_map)
    self.assertEqual(v1_event.event_type, 'temperature')
    self.assertEqual(v1_event.data, 55.5)
    self.assertEqual(v1_event.priority, 2)
    self.assertEqual(v1_event.source, source_device)

    # Now the other way around: dumping to a map, first by looking at the JSON encoding...
    v1_json = json.dumps(v1_map, sort_keys=True)
    manual_v1_json = json.dumps(json.loads(v1_event.to_json()), sort_keys=True)
    self.assertEqual(manual_v1_json, v1_json)

    new_v1_map = v1_event.to_map()
    self.assertEqual(v1_map, new_v1_map,
                     "making into v1.0 schema map didn't produce identical dict: %s" % new_v1_map)
def extract_columns(self, data, parse_metadata=True):
    """
    Extracts the important columns from the given list of SensedEvents.

    :param data: the events to process, each encoded as a map (see SensedEvent.to_map())
    :type data: list[dict]
    :param parse_metadata: if True (default), include columns for the metadata
    :return: a dict mapping column names to lists of values, one entry per event
    """
    # QUESTION: how to handle empty results???
    events = [SensedEvent.from_map(e) for e in data]
    cols = {'topic': [ev.topic for ev in events],
            'time_sent': [ev.timestamp for ev in events],
            # TODO: might not even want this? what to do with it? the 'scale-local:/' part makes it less useful...
            'source': [ev.source for ev in events],
            'value': [ev.data for ev in events],
            }

    # Include the metadata in case it has something valuable for us.
    # We have to gather up all unique keys first to ensure each row has all the needed columns so they line up.
    if parse_metadata:
        metadata_keys = set()
        for ev in events:
            for k in ev.metadata:
                metadata_keys.add(k)
        cols.update({k: [ev.metadata.get(k) for ev in events] for k in metadata_keys})

    return cols
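# Usage sketch: the column dict lines up row-wise, so it can feed straight into
# pandas (assuming pandas is available in the analysis environment; 'extractor'
# stands in for whatever object defines extract_columns).
import pandas as pd

raw_maps = [SensedEvent(data=10, source="temp0", event_type="temperature").to_map(),
            SensedEvent(data=12, source="temp1", event_type="temperature").to_map()]
cols = extractor.extract_columns(raw_maps, parse_metadata=False)
df = pd.DataFrame(cols)
print df[['topic', 'value', 'source']]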
def convert_to_sensed_event(self, relayedSensedEvent):
    structured_data = {"event": relayedSensedEvent.sensor,
                       "value": relayedSensedEvent.data['value'],
                       "published": relayedSensedEvent.published}
    event = SensedEvent(relayedSensedEvent, structured_data,
                        relayedSensedEvent.priority, relayedSensedEvent.timestamp)
    return event
def extract_event(request):
    """
    Extracts a SensedEvent from the payload of the request.  Tries to convert it to a remote event,
    if it was left as a local one, by setting the host/port/protocol.

    :param request:
    :type request: Request
    :return: the SensedEvent
    :rtype: SensedEvent
    """
    event = SensedEvent.from_json(request.payload)
    host, port = request.source
    try:
        # TODO: specify coaps if this event came through an encrypted channel?
        networks.util.process_remote_event(event, hostname=host, port=port, protocol='coap')
        # save the local resource URI so we know where exactly it entered our local client
        event.metadata['local_resource_uri'] = uri.build_uri(relative_path=request.uri_path)
        # QUESTION: should we do something with uri_query? probably not used in a PUT/POST request...
    except BaseException as e:
        log.error("error converting local source to remote source in event extracted from CoAP request: %s" % e)
    return event
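# Usage sketch with a stand-in for the CoAP Request object; the real Request
# type comes from the CoAP library, so this stub only mimics the three
# attributes extract_event() reads.
class _FakeRequest(object):
    def __init__(self, payload, source, uri_path):
        self.payload = payload
        self.source = source      # (host, port) tuple
        self.uri_path = uri_path

ev = SensedEvent(data=99, source="scale/devices/temperature", event_type="temperature")
req = _FakeRequest(ev.to_json(), ("10.0.0.7", 5683), "scale/events/temperature")
remote_ev = extract_event(req)
print remote_ev.source, remote_ev.metadata['local_resource_uri']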
def _cron(self):
    if self._db is None:
        if not self._try_connect():
            return

    # Clean up
    if self._clean_timer is None or self._clean_timer + self._clean_timeout < time.time():
        self._clean_up()
        self._clean_timer = time.time()

    # Check for available publishers
    # Will not check for Internet access
    if not self._puba:
        log.info("no available publisher reported")
        return

    res_list = None
    id_list = []
    event_list = []
    self._db_lock.acquire()
    try:
        # "== None" is intentional here: peewee overloads == to build an IS NULL clause
        res_list = self.EventRecord.select().where(self.EventRecord.upload_time == None)
        for rec in res_list:
            structured_data = {"event": rec.event,
                               "value": json.loads(rec.value_json)}
            if rec.geotag is not None:
                structured_data["geotag"] = json.loads(rec.geotag)
            if rec.condition is not None:
                structured_data["condition"] = json.loads(rec.condition)
            event = SensedEvent(rec.sensor, structured_data, rec.priority, timestamp=rec.timestamp)
            event.db_record = {"table_id": rec.id,
                               "upload_time": rec.upload_time  # should be None
                               }
            id_list.append(rec.id)
            event_list.append(event)
    except peewee.OperationalError as err:
        log.error(str(err))
        self._db = None
        # don't leak the lock when bailing out early
        self._db_lock.release()
        return
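# A minimal sketch of the peewee model both _cron() variants above appear to
# query.  The field names come from the attributes used there (event, sensor,
# value_json, priority, timestamp, condition, geotag, upload_time); the exact
# field types and database file are assumptions.
import peewee

_db = peewee.SqliteDatabase('events.db')

class EventRecord(peewee.Model):
    event = peewee.CharField()                   # event type name
    sensor = peewee.CharField()                  # source sensor/device
    value_json = peewee.TextField()              # JSON-encoded event data
    priority = peewee.IntegerField()
    timestamp = peewee.DoubleField()
    condition = peewee.TextField(null=True)      # optional JSON blob
    geotag = peewee.TextField(null=True)         # optional JSON location
    upload_time = peewee.DoubleField(null=True)  # None until published upstream

    class Meta:
        database = _db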
def _on_message(self, mqtt_client, payload, topic, qos, retain):
    """Publishes the SensedEvent internally upon receiving it."""
    try:
        event = SensedEvent.from_json(payload)
        # NOTE: we probably don't actually have to do this as its source should already be set,
        # but just in case we add additional handling later...
        process_remote_event(event)
    except BaseException as e:
        log.error("failed to parse SensedEvent from JSON payload: %s\nError was: %s" % (payload, e))
        return

    event.metadata['mqtt_topic'] = topic
    event.metadata['mqtt_broker'] = uri.build_uri(scheme='mqtt', path='broker',
                                                  host=self._hostname, port=self._hostport)
    event.metadata['time_rcvd'] = SensedEvent.get_timestamp()

    self.publish(event)
    log.debug("MqttSensor received SensedEvent from topic %s: %s" % (topic, event))
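# A sketch of wiring this handler to paho-mqtt, whose on_message callback has a
# different signature (client, userdata, message).  Assumes 'sensor' is an
# instance of the class defining _on_message above; broker host/port and the
# topic filter are placeholders.
import paho.mqtt.client as mqtt

client = mqtt.Client()
client.on_message = lambda c, userdata, msg: sensor._on_message(
    c, msg.payload, msg.topic, msg.qos, msg.retain)
client.connect("localhost", 1883)
client.subscribe("scale/events/#")
client.loop_start()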
def test_basic_json_encoding(self):
    """
    Tests whether SensedEvent encoding/decoding works as expected.

    :return:
    """
    encoded = self.event.to_json()
    decoded = SensedEvent.from_json(encoded)
    self.assertEqual(self.event, decoded,
                     "encoding then decoding an event should give an essentially identical one back!"
                     " have: %s and %s" % (self.event, decoded))
def test_defaults(self):
    """
    Tests whether creating a SensedEvent with the minimum # of arguments causes unexpected errors
    or leads to a state that will later cause Exceptions to be raised when manipulating the event.

    :return:
    """
    ev = SensedEvent.from_json(self.minimal_event.to_json())
    self.assertEqual(ev, self.minimal_event)

    # Should be able to specify None values for the data (unary event) or source (anonymous event?)
    ev = SensedEvent(data=1, source=None)
    self.assertEqual(SensedEvent.from_json(ev.to_json()), ev)
    ev = SensedEvent(data=None, source='temperature')
    self.assertEqual(SensedEvent.from_json(ev.to_json()), ev)
def convert_to_sensed_event(self, relayedSensedEvent):
    # TODO: verify these parameters are correct and propagate them to the rest of the mesh logic
    event = SensedEvent(data=relayedSensedEvent.data,
                        source=relayedSensedEvent,
                        priority=relayedSensedEvent.priority,
                        event_type=relayedSensedEvent.source,
                        timestamp=relayedSensedEvent.timestamp,
                        metadata={"published": relayedSensedEvent.published})
    return event
def process_remote_event(event, protocol=None, hostname=None, port=None, relay_uri=None):
    """
    Process a SensedEvent from a remote node by:
      1) ensuring its source points to the remote that created it,
      2) saving the time this event was received (right now), and
      3) optionally storing the relay_uri we received this event from,
         e.g. a resource URI (if different from the remote URI), broker URI, etc.

    NOTE: if relay_uri is specified but the other parameters are not, they'll be extracted
    from relay_uri, so you'll need to either set relay_uri or at least hostname/port!

    :param event:
    :type event: scale_client.core.SensedEvent
    :param hostname: the remote host
    :param port: remote port from which the event came
    :param relay_uri: optional URI specifying the remote entity from which this event just came
        (e.g. broker or CoAP resource) as opposed to the entity that originally created it
    :param protocol: name of the protocol to include in the URI
    """
    # If the event isn't already formatted as being from a legitimate remote source (i.e. the remote
    # forgot to convert the source), tag it as coming from the specified remote so we don't
    # interpret it as a local one and e.g. send it back there.
    if event.is_local or not uri.is_host_known(event.source):
        # try to extract unspecified parameters
        parsed_relay_uri = uri.parse_uri(relay_uri) if relay_uri is not None else None
        if parsed_relay_uri and not hostname:
            hostname = parsed_relay_uri.host
        if parsed_relay_uri and not port:
            port = parsed_relay_uri.port
        if parsed_relay_uri and not protocol:
            protocol = parsed_relay_uri.getscheme()

        # verify we have enough information to proceed
        if not hostname or not (port or protocol):
            raise ValueError("failed to specify enough fields to at least identify protocol/port and host!"
                             " host=%s, port=%s, protocol=%s, relay_uri=%s" % (hostname, port, protocol, relay_uri))

        # ENHANCE: perhaps we want to allow the remote to specify a different protocol without knowing its IP address?
        # ENHANCE: perhaps we should do some validation, as some networks could make this a problem, e.g. a NAT
        event.source = uri.get_remote_uri(event.source, protocol=protocol, host=hostname, port=port)

    # Assume the receive time is right now:
    # NOTE: in case the event was relayed to us from an intermediary, we should overwrite the time_rcvd!!
    event.metadata['time_rcvd'] = SensedEvent.get_timestamp()

    # In case the remote's original URI is different than how we got it, e.g. from the CoAP resource:
    if relay_uri and relay_uri != event.source:
        event.metadata.setdefault('relay_uri', relay_uri)
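# Usage sketch: a locally-sourced event arriving over CoAP gets re-tagged as
# remote.  The import path and the example relay URI are assumptions based on
# the docstring and the uri helpers used above.
from scale_client.core.sensed_event import SensedEvent  # assumed import path

ev = SensedEvent(data=42.0, source="scale/devices/temperature", event_type="temperature")
process_remote_event(ev, relay_uri="coap://10.0.0.5:5683/scale/events/temperature")
# ev.source now names the remote host/port/protocol, and metadata records receipt:
print ev.source, ev.metadata['time_rcvd'], ev.metadata.get('relay_uri')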
def test_is_local(self):
    """
    Tests whether we can properly determine whether a SensedEvent came from our local node or not.

    :return:
    """
    self.assertTrue(self.minimal_event.is_local,
                    "simple string source should be considered local!")
    self.assertFalse(SensedEvent(data=1, source="coap://1.1.1.1/scale/events/temp").is_local,
                     "event from coap source is not local!")
def test_json_encoding_excluded_fields(self):
    """
    Tests whether we can correctly encode a SensedEvent with the exclude_fields option
    and then correctly decode it back.

    :return:
    """
    encoded = self.event.to_json(exclude_fields=('schema', 'condition', 'misc', 'prio_value', 'prio_class'))
    decoded = SensedEvent.from_json(encoded)
    self.assertEqual(self.event, decoded,
                     "encoding then decoding an event should give an essentially identical one back!"
                     " have: %s and %s" % (self.event, decoded))

    # Now verify that excluding these fields at least allows us to decode the resulting encoded
    # event, even though we KNOW that they will not be truly equal.
    encoded = self.event.to_json(exclude_fields=('timestamp', 'event_type'))
    decoded = SensedEvent.from_json(encoded)  # should not raise an error

    # Last, verify that excluding these fields DOES cause an error!
    encoded = self.event.to_json(exclude_fields=('device',))
    with self.assertRaises(NotImplementedError):
        SensedEvent.from_json(encoded)
    encoded = self.event.to_json(exclude_fields=('value',))
    with self.assertRaises(TypeError):
        SensedEvent.from_json(encoded)
def test_nevents(self):
    # FIRST TEST: basic periodic publishing with a limited # of events
    # NOTE: make sure you bound the number of events generated or the SCALE client won't stop running!
    gen_cfg = dict(topic='fire', publication_period=.5, nevents=10)
    client = ScaleClient(quit_time=self.quit_time, raise_errors=True)
    broker = client.setup_broker()
    pub = RandomVirtualSensor(broker, event_generator=gen_cfg)
    stats_sub = StatisticsApplication(broker, subscriptions=('fire', 'ice'))
    events_sub = EventStoringApplication(broker, subscriptions=('fire', 'ice'))

    # get the time of start and end; ensure all events have increasing timestamps between these values
    start_time = SensedEvent.get_timestamp()
    client.run()
    end_time = SensedEvent.get_timestamp()

    # verify the # of events generated
    self.assertEqual(stats_sub.get_stats('fire', 'count'), 10)
    self.assertGreater(len(events_sub.events), 0)
    for ev in events_sub.events:
        self.assertLess(start_time, ev.timestamp)
        self.assertLess(ev.timestamp, end_time)

    ## EDGE CASE: no events generated with 0 total events
    gen_cfg = dict(topic='fire', publication_period=.5, nevents=0)
    client = ScaleClient(quit_time=self.quit_time, raise_errors=True)
    broker = client.setup_broker()
    pub = RandomVirtualSensor(broker, event_generator=gen_cfg)
    stats_sub = StatisticsApplication(broker, subscriptions=('fire', 'ice'))
    client.run()

    # verify no events generated
    self.assertEqual(stats_sub.get_stats('fire', 'count'), 0)
def make_event_with_raw_data(self, raw_data, priority=None):
    """
    This implementation assumes that the raw_data is a JSON-encoded SensedEvent already.

    :param raw_data:
    :param priority:
    :return:
    """
    # TODO: use priority? or log a warning if someone tries to use it?
    try:
        ev = SensedEvent.from_json(raw_data)
        return ev
    except (ValueError, TypeError):
        log.error("Failed to decode SensedEvent from: %s" % raw_data)
        raise  # bare raise preserves the original traceback
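# Usage sketch: since this variant expects raw_data to already be a JSON-encoded
# SensedEvent, a round trip through to_json() should reproduce the event.
# 'sensor' is assumed to be an instance of the class defining the method above.
original = SensedEvent(data=7, source="temp0", event_type="temperature")
restored = sensor.make_event_with_raw_data(original.to_json())
assert restored == original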
@classmethod
def extract_columns(cls, data):
    """
    Extracts the important columns from the given list of SensedEvents.

    :param data: the events to process, each encoded as a map
    :return: a dict mapping column names to lists of values, one entry per event
    """
    events = [SensedEvent.from_map(e) for e in data]

    # XXX: to ensure this host shows up in at least one row, we add a dummy event when it had none
    if not events:
        events = [SensedEvent(data=True, source="no source", event_type=EMPTY_OUTPUT_EVENT_TYPE)]

    cols = {'topic': [ev.topic for ev in events],
            'time_sent': [ev.timestamp for ev in events],
            # TODO: might not even want this? what to do with it? the 'scale-local:/' part makes it less useful...
            'source': [ev.source for ev in events],
            # all the events these types receive are seq#s; could move this down to a derived class if needed...
            'seq': [ev.data for ev in events],
            }
    return cols
def make_event_with_raw_data(self, raw_data, priority=None):
    """
    This function returns a new SensedEvent that contains the raw data specified, packaged in the
    SensedEvent.data instance variable.  Override this method to tweak your custom SensedEvent.

    :param raw_data: raw data string or bytes
    :param priority: optional priority (defaults to the class's DEFAULT_PRIORITY)
    :return: SensedEvent
    """
    if priority is None:
        priority = self.__class__.DEFAULT_PRIORITY
    structured_data = {"event": self.get_type(), "value": raw_data}
    event = SensedEvent(self.get_type() if self.device is None else self.device.device,
                        structured_data, priority)
    return event
@classmethod
def extract_columns(cls, data):
    """We need to extract the time each pick was received at the server for processing."""
    cols = super(ServerOutput, cls).extract_columns(data)

    # XXX: dummy data to ensure empty results parse okay and create a row
    if not data:
        cols['time_rcvd'] = [0]
    else:
        cols['time_rcvd'] = [cls.get_aggregation_time(ev)
                             for ev in (SensedEvent.from_map(e) for e in data)]
    cols['src_ip'] = [get_hostname_from_path(src) for src in cols.pop('source')]
    return cols
def test_async(self):
    # THIRD TEST: asynchronous events, which we verify worked by setting appropriate bounds on pub times
    duration = 5
    gen_cfg = dict(topic='fire',
                   publication_period=dict(dist='exp', args=(0.5,), lbound=0.5, ubound=1),
                   total_time=duration)
    client = ScaleClient(quit_time=duration + 1, raise_errors=True)
    broker = client.setup_broker()
    pub = RandomVirtualSensor(broker, event_generator=gen_cfg)
    stats_sub = StatisticsApplication(broker, subscriptions=('fire', 'ice'))
    events_sub = EventStoringApplication(broker, subscriptions=('fire', 'ice'))

    # get the time of start and end; ensure all events have increasing timestamps between these values
    start_time = SensedEvent.get_timestamp()
    client.run()

    # verify the expected # of events generated
    self.assertGreaterEqual(stats_sub.get_stats('fire', 'count'), 4)
    self.assertLessEqual(stats_sub.get_stats('fire', 'count'), 11)

    # Verify times are as expected
    # ENHANCE: how to do this automatically?
    print "MANUALLY verify these pub times look async:"
    ev_times = [ev.timestamp - start_time for ev in events_sub.events]
    # compute inter-arrival times into a new list (modifying in place would reuse already-diffed values)
    ev_diffs = ev_times[:1] + [ev_times[i] - ev_times[i - 1] for i in range(1, len(ev_times))]
    print ev_diffs

    last_time = events_sub.events[0].timestamp
    for ev in events_sub.events[1:]:
        self.assertLess(start_time, ev.timestamp)
        # give a little lag time for the last event
        self.assertLess(ev.timestamp, start_time + duration + 0.2)
        # ensure the time diff is within our bounds (roughly)
        this_time = ev.timestamp
        self.assertLess(this_time - last_time, 1.1)
        self.assertGreater(this_time - last_time, 0.5)
        last_time = this_time
def test_total_time(self):
    # SECOND TEST: limited time duration of published events
    duration = 3
    gen_cfg = dict(topic='fire', publication_period=.5, total_time=duration)
    client = ScaleClient(quit_time=self.quit_time, raise_errors=True)
    broker = client.setup_broker()
    pub = RandomVirtualSensor(broker, event_generator=gen_cfg)
    stats_sub = StatisticsApplication(broker, subscriptions=('fire', 'ice'))
    events_sub = EventStoringApplication(broker, subscriptions=('fire', 'ice'))

    # get the time of start and end; ensure all events have increasing timestamps between these values
    start_time = SensedEvent.get_timestamp()
    client.run()

    # verify SOME events generated
    self.assertGreater(stats_sub.get_stats('fire', 'count'), 3)

    # Verify times are as expected
    for ev in events_sub.events:
        self.assertLess(start_time, ev.timestamp)
        # give a little lag time for the last event
        self.assertLess(ev.timestamp, start_time + duration + 0.2)

    ## EDGE CASE: no events generated with 0 time covered
    gen_cfg = dict(topic='fire', publication_period=.5, total_time=0)
    client = ScaleClient(quit_time=self.quit_time, raise_errors=True)
    broker = client.setup_broker()
    pub = RandomVirtualSensor(broker, event_generator=gen_cfg)
    stats_sub = StatisticsApplication(broker, subscriptions=('fire', 'ice'))
    client.run()

    # verify no events generated
    self.assertEqual(stats_sub.get_stats('fire', 'count'), 0)
def publish_neighbors_average_temp(self):
    data = {}
    data['event'] = 'MeshSensor'
    data['event_type'] = 'average_temperature'
    data['value'] = self.relayedSensedEvents['temperature']['neighbors_average']
    data['detail'] = {}
    data['detail']['temp_count'] = self.relayedSensedEvents['temperature']['neighbors_counter']
    data['detail']['temp_sum'] = round(self.relayedSensedEvents['temperature']['neighbors_sum'])
    try:
        encoded_data = json.dumps(data)
        event = SensedEvent(data['event'], data, 5)
        self.publish(event)
        log.info('Published neighbors average temperature to application. Data: ' + encoded_data)
        return True
    except Exception:
        log.error('Invalid average temperature encoded data string')
        return False
def test_alert_compression(self, n_sensors=20, n_quakes=10, expect_one_packet=False):
    """Tests the method for compressing the data in a seismic alert event so that it fits in a single
    CoAP packet.  Note that this test doesn't actually verify that the packet isn't fragmented on the
    wire, but rather relies on the CoAP helper function to check if it's too big."""

    # IDEA: create pick events, pass them to the server, get its aggregated alert event, compress that,
    # and then verify that it fits into a single CoAP packet and actually contains recent events
    events = self._generate_events(n_quakes, n_sensors, event_type=SEISMIC_PICK_TOPIC, time_incr=1)

    # NOTE: because of the varying event ID lengths and our attempt to quickly cut out some that are
    # over capacity, we add some more events with longer name lengths to verify that part works.
    # You can enable logging and add print statements to the function to manually verify this, though
    # automated tests with more than 2 different event ID lengths would be much better.
    # WARNING: this test wasn't catching a bug due to this fact!
    big_event_id = 'big_huge_accelerometer_thing%d'
    events.extend(self._generate_events(n_quakes, n_sensors / 4,
                                        event_type=SEISMIC_PICK_TOPIC, sensor_name=big_event_id))

    for e in events:
        self.srv.on_event(e, topic=e.topic)
    alert = self.srv.read()

    if not expect_one_packet:
        self.assertFalse(msg_fits_one_coap_packet(alert.to_json()),
                         "alert msg already fits into one packet! add more pick events...")

    comp_alert = compress_alert_one_coap_packet(alert)
    self.assertTrue(msg_fits_one_coap_packet(comp_alert),
                    "compressed alert data doesn't actually fit in one packet!")

    # double-check that the contained events are newer ones
    decomp_alert = SensedEvent.from_json(comp_alert)
    # print "alert data was:", alert.data, "\nBut now is:", decomp_alert.data
    # print "seq #s after compression are:", [get_seq_from_event_id(eid) for eid in decomp_alert.data]
    self.assertIsInstance(decomp_alert.data, list)  # make sure it isn't just a single event string...
    # should keep the newest
    self.assertTrue(any(get_seq_from_event_id(eid) == n_quakes - 1 for eid in decomp_alert.data))

    # Don't run these checks for the tests that verify it works ok with only a few events
    if not expect_one_packet:
        self.assertGreaterEqual(len(decomp_alert.data), 5)  # where all the events at??
        # should throw out the oldest
        self.assertTrue(all(get_seq_from_event_id(eid) > 0 for eid in decomp_alert.data))
        # check to make sure we're using most of the packet.  Note that we assume here nquakes < 1000
        self.assertGreater(len(comp_alert), COAP_MAX_PAYLOAD_SIZE - (len(big_event_id) + 4))
def on_fire(self, event):
    print "beep beep! FIRE!"
    self.publish(SensedEvent("fire sensor", "fire alarm!", 1))
def BombLit(source):
    return SensedEvent(source=source, event_type="bomb_lit", data="psssssss!!", priority=4)
def setUp(self):
    self.event = SensedEvent(data=10, source="temp0", event_type="temperature")
    self.minimal_event = SensedEvent(data=10, source="temp0")
def Explosion(source):
    return SensedEvent(source=source, event_type="explosion", data="BOOM!", priority=1)
def Fire(source):
    return SensedEvent(source=source, event_type="fire", data="Fire!", priority=2)
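# A sketch of how these factory helpers might be used from an application;
# the class, its base, and the on_* hooks are hypothetical, but self.publish()
# mirrors the other snippets in this section.
class BombMonitor(Application):  # assumed base class
    def on_bomb_lit(self):
        src = "scale/devices/bomb_detector"  # hypothetical source URI
        self.publish(BombLit(src))

    def on_explosion(self):
        src = "scale/devices/bomb_detector"
        self.publish(Explosion(src))
        self.publish(Fire(src))  # an explosion presumably starts a fire, too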
class TestSensedEvent(unittest.TestCase):
    """
    Tests the Scale client SensedEvent class for data manipulation, encoding, and decoding.
    This should be used for testing different schema versions.
    """

    def setUp(self):
        self.event = SensedEvent(data=10, source="temp0", event_type="temperature")
        self.minimal_event = SensedEvent(data=10, source="temp0")

    def test_basic_json_encoding(self):
        """
        Tests whether SensedEvent encoding/decoding works as expected.

        :return:
        """
        encoded = self.event.to_json()
        decoded = SensedEvent.from_json(encoded)
        self.assertEqual(self.event, decoded,
                         "encoding then decoding an event should give an essentially identical one back!"
                         " have: %s and %s" % (self.event, decoded))

    def test_json_encoding_excluded_fields(self):
        """
        Tests whether we can correctly encode a SensedEvent with the exclude_fields option
        and then correctly decode it back.

        :return:
        """
        encoded = self.event.to_json(exclude_fields=('schema', 'condition', 'misc', 'prio_value', 'prio_class'))
        decoded = SensedEvent.from_json(encoded)
        self.assertEqual(self.event, decoded,
                         "encoding then decoding an event should give an essentially identical one back!"
                         " have: %s and %s" % (self.event, decoded))

        # Now verify that excluding these fields at least allows us to decode the resulting encoded
        # event, even though we KNOW that they will not be truly equal.
        encoded = self.event.to_json(exclude_fields=('timestamp', 'event_type'))
        decoded = SensedEvent.from_json(encoded)  # should not raise an error

        # Last, verify that excluding these fields DOES cause an error!
        encoded = self.event.to_json(exclude_fields=('device',))
        with self.assertRaises(NotImplementedError):
            SensedEvent.from_json(encoded)
        encoded = self.event.to_json(exclude_fields=('value',))
        with self.assertRaises(TypeError):
            SensedEvent.from_json(encoded)

    def test_schema_versions(self):
        """
        Tests whether events formatted from different schema versions are compatible with the
        current data model.

        :return:
        """
        source_device = "scale-local:scale/devices/temperature"
        v1_map = {"d": {"event": "temperature",
                        "value": 55.5,
                        "units": "celsius",
                        "timestamp": 12345678,
                        "device": source_device,
                        "location": {"lat": 33.3, "lon": "-71"},
                        "condition": {"threshold": {"operator": ">", "value": "95"}},
                        "prio_class": "high",
                        "prio_value": 2,
                        "schema": "www.schema.org/scale_sensors.1.0.whatever",
                        }
                  }

        v1_event = SensedEvent.from_map(v1_map)
        self.assertEqual(v1_event.event_type, 'temperature')
        self.assertEqual(v1_event.data, 55.5)
        self.assertEqual(v1_event.priority, 2)
        self.assertEqual(v1_event.source, source_device)

        # Now the other way around: dumping to a map, first by looking at the JSON encoding...
        v1_json = json.dumps(v1_map, sort_keys=True)
        manual_v1_json = json.dumps(json.loads(v1_event.to_json()), sort_keys=True)
        self.assertEqual(manual_v1_json, v1_json)

        new_v1_map = v1_event.to_map()
        self.assertEqual(v1_map, new_v1_map,
                         "making into v1.0 schema map didn't produce identical dict: %s" % new_v1_map)

    def test_source(self):
        """
        Tests the special field "source", which can be a plain string or a URI representing a
        VirtualSensor, DeviceDescriptor, etc...  We especially want to make sure that simple
        strings or None are handled well.

        :return:
        """
        # TODO: eventually look up objects by URI in order to determine if they refer to the same one?
        # e.g. we should be able to have a remote device source that refers to a local one of ours be equal...
        pass

    def test_defaults(self):
        """
        Tests whether creating a SensedEvent with the minimum # of arguments causes unexpected errors
        or leads to a state that will later cause Exceptions to be raised when manipulating the event.

        :return:
        """
        ev = SensedEvent.from_json(self.minimal_event.to_json())
        self.assertEqual(ev, self.minimal_event)

        # Should be able to specify None values for the data (unary event) or source (anonymous event?)
        ev = SensedEvent(data=1, source=None)
        self.assertEqual(SensedEvent.from_json(ev.to_json()), ev)
        ev = SensedEvent(data=None, source='temperature')
        self.assertEqual(SensedEvent.from_json(ev.to_json()), ev)

    def test_is_local(self):
        """
        Tests whether we can properly determine whether a SensedEvent came from our local node or not.

        :return:
        """
        self.assertTrue(self.minimal_event.is_local,
                        "simple string source should be considered local!")
        self.assertFalse(SensedEvent(data=1, source="coap://1.1.1.1/scale/events/temp").is_local,
                         "event from coap source is not local!")
def on_start(self):
    self.publish(SensedEvent("some sensor", "blah", 1))
def __init__(self):
    SensedEvent.__init__(self, "explosion_sensor", "BOOM!", 1)
def __init__(self):
    SensedEvent.__init__(self, "explosion_sensor", "psssssss!!", 4)
def __init__(self):
    SensedEvent.__init__(self, "fire_sensor", "Fire!", 2)
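# For contrast with the positional constructors above, a sketch of the same
# subclass written against the newer keyword API used elsewhere in this
# section (the class name is hypothetical).
class FireEvent(SensedEvent):
    def __init__(self):
        SensedEvent.__init__(self, data="Fire!", source="fire_sensor",
                             event_type="fire", priority=2)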