def _make_event(self, last_event, val):
    """Build a SensorEvent that reuses the id and timestamp of
    last_event but carries the supplied value.

    Subclasses with a different event definition should override this.
    """
    return SensorEvent(sensor_id=last_event.sensor_id,
                       ts=last_event.ts,
                       val=val)
def complete(self):
    """Flush any partial window state at end of stream.

    If events arrived since the last emission, return one final event
    whose value is the median of those buffered samples, stamped with
    the id/timestamp of the most recent input; otherwise return None.
    """
    if self.events_since_last <= 0:
        # Nothing buffered since the last emission -> nothing to flush.
        return None
    partial_window = self.samples[0:self.events_since_last]
    return SensorEvent(sensor_id=self.last_event.sensor_id,
                       ts=self.last_event.ts,
                       val=median(partial_window))
def test_default_mapper(self):
    """Round-trip a SensorEvent through the default event/row mapper
    and verify nothing is lost."""
    original = SensorEvent(ts=time.time(), sensor_id=1, val=123.456)
    round_tripped = default_event_mapper.row_to_event(
        default_event_mapper.event_to_row(original))
    self.assertEqual(round_tripped, original,
                     "Round-tripped event does not match original event")
def row_to_event(self, row):
    """Build a SensorEvent from a csv row.

    Layout used here: row[0] = timestamp, row[2] = sensor id,
    row[3] = value (row[1] is ignored).
    """
    ts = float(row[0])
    try:
        sensor_id = int(row[2])
    except ValueError:
        # Sensor ids do not necessarily have to be integers; fall back
        # to the raw string.
        sensor_id = row[2]
    return SensorEvent(ts=ts, sensor_id=sensor_id, val=float(row[3]))
def step(self, v):
    """Feed one event into the current window.

    Emits a median-valued event once every `period` inputs (stamped
    with the triggering event's id and timestamp); returns None while
    the window is still filling.
    """
    self.samples[self.events_since_last] = v.val
    self.events_since_last += 1
    if self.events_since_last != self.period:
        # Window not full yet. Remember this event so that complete()
        # can stamp a final partial-window emission.
        self.last_event = v
        return None
    # Window is full: emit the median and start a fresh window.
    self.events_since_last = 0
    return SensorEvent(sensor_id=v.sensor_id, ts=v.ts,
                       val=median(self.samples))
def setup_flow(topic):
    """Wire up the flow: mqtt bytes -> utf-8 json -> SensorEvent with
    the value scaled to an int in 0..255 -> console output + ring
    display. Returns the mqtt reader so the caller can schedule it.
    """
    mqtt = MQTTReader('localhost', topics=[(topic, 0),])
    ring = RingInputThing()
    scaled = (mqtt
              .map(lambda msg: (msg.payload).decode('utf-8'))
              .from_json(constructor=SensorEvent)
              .map(lambda evt: SensorEvent(sensor_id=evt.sensor_id,
                                           ts=evt.ts,
                                           val=int(evt.val * 255))))
    scaled.output()
    scaled.connect(ring)
    return mqtt
def setup_flow(topic, filename):
    """Wire up the flow: mqtt bytes -> utf-8 json -> SensorEvent
    restamped with the local arrival time -> console output + csv
    file. Returns the mqtt reader so the caller can schedule it.
    """
    mqtt = MQTTReader('localhost', topics=[(topic, 0),])
    restamped = (mqtt
                 .select(lambda msg: (msg.payload).decode('utf-8'))
                 .from_json(constructor=SensorEvent)
                 .select(lambda evt: SensorEvent(sensor_id=evt.sensor_id,
                                                 ts=time.time(),
                                                 val=evt.val)))
    restamped.output()
    restamped.csv_writer(filename)
    return mqtt
def on_next(self, x):
    """Forward event x downstream, first checking for a gap in the
    sample stream since the previous event and marking it with a NaN
    placeholder event.
    """
    # A gap exists when more than MAX_TIME_INTERVAL has elapsed between
    # the previous event's timestamp and this one's.
    if (self.last_time is not None) and \
       (x.ts - self.last_time)>MAX_TIME_INTERVAL:
        # Walk forward from the last seen timestamp in expected-interval
        # steps, counting how many samples should have arrived.
        ts = self.last_time + EXPECTED_TIME_INTERVAL
        missing = 0
        while (x.ts - ts) > EXPECTED_TIME_INTERVAL:
            if missing == 0:
                # NOTE(review): only ONE NaN placeholder is dispatched
                # per gap (at the first missing slot), even though every
                # missing sample is counted below — presumably a single
                # NaN is enough to break up downstream plots/aggregates;
                # confirm this asymmetry is intended.
                self._dispatch_next(
                    SensorEvent(sensor_id=x.sensor_id, ts=ts, val=NaN))
            ts += EXPECTED_TIME_INTERVAL
            missing += 1
        print("Found %s missing samples" % missing)
    # Remember this event's timestamp for the next gap check, then pass
    # the real event through.
    self.last_time = x.ts
    self._dispatch_next(x)
def _observe(self):
    """Pull one row from the csv reader and push it downstream as a
    SensorEvent, translating end-of-file and errors into the
    corresponding stream notifications.
    """
    try:
        row = next(self.reader)
        self._dispatch_next(
            SensorEvent(ts=float(row[0]), sensor_id=row[1],
                        val=float(row[2])))
    except StopIteration:
        # Normal end of file: close it and signal completion.
        self.file.close()
        self._dispatch_completed()
    except FatalError:
        # Fatal problems are not reported on the event stream; close
        # everything and let the exception propagate.
        self._close()
        raise
    except Exception as e:
        # Anything else (e.g. a malformed row) terminates this stream
        # with an error notification.
        self.file.close()
        self._dispatch_error(e)
async def sample_and_process(sensor, mqtt_writer, xducer):
    """Take one sample from the sensor, log it, run it through the
    transducer, and publish any resulting event over mqtt.

    Returns False once the sensor is exhausted (after flushing the
    transducer's partial state and disconnecting), True otherwise.
    """
    try:
        reading = sensor.sample()
    except StopIteration:
        # Sensor exhausted: flush any partial transducer state, then
        # tear down the mqtt connection.
        final_event = xducer.complete()
        if final_event:
            await mqtt_writer.send(final_event)
        print("disconnecting")
        await mqtt_writer.disconnect()
        return False
    raw_event = SensorEvent(sensor_id=sensor.sensor_id, ts=time.time(),
                            val=reading)
    csv_writer(raw_event)
    smoothed = xducer.step(raw_event)
    if smoothed:
        await mqtt_writer.send(smoothed)
    return True
def step(self, event):
    """Add event to the rolling history window and return a new event
    whose value is the mean of all values within history_interval of
    this event's timestamp (stale entries are pruned).
    """
    running_total = event.val  # the latest event always participates
    count = 1
    drop_through = 0  # index just past the last stale history entry
    for idx, past in enumerate(self.history):
        if (event.ts - past.ts) < self.history_interval:
            running_total += past.val
            count += 1
        else:
            # This entry's timestamp is stale; the window will start at
            # least at the next entry.
            drop_through = idx + 1
    if drop_through:
        self.history = self.history[drop_through:]
    self.history.append(event)
    return SensorEvent(ts=event.ts, sensor_id=event.sensor_id,
                       val=running_total / count)
def setup_flow(args):
    """Wire up the flow: mqtt bytes -> utf-8 json -> SensorEvent
    restamped with the local arrival time -> console output +
    InfluxDB. Returns the mqtt reader so the caller can schedule it.
    """
    mqtt = MQTTReader(args.mqtt_host, topics=[(args.topic_name, 0),])
    restamped = (mqtt
                 .select(lambda msg: (msg.payload).decode('utf-8'))
                 .from_json(constructor=SensorEvent)
                 .select(lambda evt: SensorEvent(sensor_id=evt.sensor_id,
                                                 ts=time.time(),
                                                 val=evt.val)))
    restamped.output()
    influx = InfluxDBWriter(
        msg_format=Sensor(series_name=args.influx_measurement,
                          fields=['val', 'ts'],
                          tags=['sensor_id']),
        generate_timestamp=False,
        username=args.influx_username,
        password=args.influx_password,
        database=args.influx_database)
    restamped.connect(influx)
    return mqtt
def sample_and_process(sensor, mqtt_writer, xducer, completion_cb, error_cb):
    """Callback-style variant of the sampling step: take one sample,
    log it, run it through the transducer, and publish any resulting
    event over mqtt.

    On success, completion_cb(True) is eventually invoked; once the
    sensor is exhausted, partial transducer state is flushed, the mqtt
    connection is torn down, and completion_cb(False) is invoked.
    error_cb is passed through to every async mqtt operation.
    """
    try:
        reading = sensor.sample()
    except StopIteration:
        # Sensor exhausted: flush partial state (if any), then
        # disconnect, chaining the callbacks in order.
        final_event = xducer.complete()
        if final_event:
            mqtt_writer.send(final_event,
                             lambda: mqtt_writer.disconnect(
                                 lambda: completion_cb(False), error_cb),
                             error_cb)
        else:
            mqtt_writer.disconnect(lambda: completion_cb(False), error_cb)
        return
    raw_event = SensorEvent(sensor_id=sensor.sensor_id, ts=time.time(),
                            val=reading)
    csv_writer(raw_event)
    smoothed = xducer.step(raw_event)
    if smoothed:
        mqtt_writer.send(smoothed, lambda: completion_cb(True), error_cb)
    else:
        completion_cb(True)
# Top-level wiring for the Raspberry Pi data-collection node: an
# optional local lux sensor plus an mqtt subscription for remote
# sensors, all driven by one scheduler.
scheduler = Scheduler(asyncio.get_event_loop())

if HAS_LOCAL_SENSOR:
    # Sample the locally attached lux sensor every 60 seconds on its
    # own thread, logging to a rolling csv file and to the console.
    sensor = SensorAsOutputThing(LuxSensor(sensor_id=LOCAL_SENSOR_ID))
    sensor.rolling_csv_writer(DIRECTORY, LOCAL_SENSOR_ID)
    sensor.output()
    scheduler.schedule_periodic_on_separate_thread(sensor, 60)

mqtt_reader = MQTTReader('localhost', client_id='rpi',
                         topics=[('remote-sensors', 0),])
# we convert the tuple received into a SensorEvent, overwriting the timestamp.
# Incoming json tuples appear to be (sensor_id, ts, val): tpl[1] (the
# remote timestamp) is discarded in favor of local arrival time —
# TODO confirm against the publishing side.
dispatcher = mqtt_reader.map(lambda m:(m.payload).decode("utf-8"))\
                        .from_json()\
                        .map(lambda tpl: SensorEvent(sensor_id=tpl[0],
                                                     ts=time.time(),
                                                     val=tpl[2]))\
                        .dispatch(dispatch_rules)

# For each remote sensor, we create a separate csv writer
for remote in REMOTE_SENSORS:
    dispatcher.rolling_csv_writer(DIRECTORY, remote, sub_port=remote).output()

# Events not matched by any dispatch rule fall through to this default
# connection.
dispatcher.connect(lambda x: print("Unexpected sensor %s, full event was %s" % (x.sensor_id, x)))

#mqtt_reader.output()
mqtt_reader.print_downstream()
scheduler.schedule_on_private_event_loop(mqtt_reader)
print("Starting run...")
scheduler.run_forever()
def msg_to_event(msg):
    """Turn an indexable (sensor_id, ts, val) message into a SensorEvent."""
    sensor_id, ts, val = msg[0], msg[1], msg[2]
    return SensorEvent(sensor_id=sensor_id, ts=ts, val=val)
def generator():
    # Lazily wrap each of the closed-over values with the sensor id and
    # the time at which it is actually yielded.
    yield from (SensorEvent(sensor_id, time.time(), v) for v in values)
def generator():
    # Endless stream of gaussian readings for the closed-over sensor id,
    # each stamped with the time it is produced.
    while True:
        reading = random.gauss(mean, stddev)
        yield SensorEvent(sensor_id, time.time(), reading)
def generator():
    # Emit exactly stop_after_events gaussian readings, then finish.
    for _ in range(stop_after_events):
        yield SensorEvent(sensor_id, time.time(),
                          random.gauss(mean, stddev))
def row_to_event(self, row):
    """Convert a (datetime, sensor_id, val) row into a SensorEvent
    whose timestamp is POSIX seconds.
    """
    # Sanity check on the row shape (note: stripped under python -O).
    assert len(row) == 3, "Expecting 3 elements, got '%s'" % row.__repr__()
    when, sensor_id, value = row
    return SensorEvent(ts=when.timestamp(), sensor_id=sensor_id,
                       val=value)
def _make_event(self, val):
    """Wrap val in a SensorEvent attributed to the controller and
    stamped with the current time."""
    return SensorEvent(ts=time.time(), sensor_id='Controller', val=val)
def _replace_event(self, state, new_event, old_event, total_events):
    """Slide the rolling-sum window one step: add the incoming event's
    value, drop the outgoing one, and build an event carrying the
    resulting mean over total_events.

    Returns a (averaged_event, new_running_sum) pair.
    """
    updated_sum = state + new_event.val - old_event.val
    averaged = SensorEvent(sensor_id=new_event.sensor_id,
                           ts=new_event.ts,
                           val=updated_sum / total_events)
    return (averaged, updated_sum)
os.remove(tf.name) # data for rollover test ROLLING_FILE1 = 'dining-room-2015-01-01.csv' ROLLING_FILE2 = 'dining-room-2015-01-02.csv' FILES = [ROLLING_FILE1, ROLLING_FILE2] def make_ts(day, hr, minute): return (datetime.datetime(2015, 1, day, hr, minute) - datetime.datetime(1970, 1, 1)).total_seconds() EVENTS = [ SensorEvent('dining-room', make_ts(1, 11, 1), 1), SensorEvent('dining-room', make_ts(1, 11, 2), 2), SensorEvent('dining-room', make_ts(2, 11, 1), 3), SensorEvent('dining-room', make_ts(2, 11, 2), 4) ] # data for dispatch test sensor_ids = ['dining-room', 'living-room'] ROLLING_FILE3 = 'living-room-2015-01-01.csv' ROLLING_FILE4 = 'living-room-2015-01-02.csv' FILES2 = [ROLLING_FILE1, ROLLING_FILE2, ROLLING_FILE3, ROLLING_FILE4] EVENTS2 = [ SensorEvent('dining-room', make_ts(1, 11, 1), 1), SensorEvent('living-room', make_ts(1, 11, 2), 2), SensorEvent('living-room', make_ts(2, 11, 1), 3), SensorEvent('dining-room', make_ts(2, 11, 2), 4)