def complete(self):
    """Flush any partial window as one final event.

    If samples have accumulated since the last emission, emit a final
    event carrying the median (not the mean, despite what the original
    comment said) of those samples, stamped with the sensor id and
    timestamp of the last event seen.  Falls through (returns None)
    when there is no partial state.
    """
    if self.events_since_last > 0:
        # if we have some partial state, we emit one final event that
        # takes the median of whatever we saw since the last emission.
        return SensorEvent(sensor_id=self.last_event.sensor_id,
                           ts=self.last_event.ts,
                           val=median(self.samples[0:self.events_since_last]))
def _make_event(self, last_event, val):
    """Build a new event copying *last_event*'s identity and timestamp
    but carrying *val* as its value.

    This assumes SensorEvent; override in subclasses that use a
    different event definition.
    """
    return SensorEvent(val=val,
                       ts=last_event.ts,
                       sensor_id=last_event.sensor_id)
def test_default_mapper(self):
    """Round-trip a SensorEvent through the default event/row mapper
    and verify nothing is lost."""
    original = SensorEvent(ts=time.time(), sensor_id=1, val=123.456)
    as_row = default_event_mapper.event_to_row(original)
    round_tripped = default_event_mapper.row_to_event(as_row)
    self.assertEqual(round_tripped, original,
                     "Round-tripped event does not match original event")
def step(self, v):
    """Accumulate one sample; emit a median event once per full period.

    Returns a SensorEvent carrying the median of the window when the
    window fills, otherwise None.
    """
    # Store the incoming value in the next window slot.
    self.samples[self.events_since_last] = v.val
    self.events_since_last += 1
    if self.events_since_last != self.period:
        # Window not full yet: remember the event in case complete()
        # runs before a full period elapses.
        self.last_event = v
        return None
    # A full period of samples has arrived: emit their median and reset.
    window_median = median(self.samples)
    self.events_since_last = 0
    return SensorEvent(sensor_id=v.sensor_id, ts=v.ts, val=window_median)
def mk_csv():
    """Build a small fixture: three single-event rows for the 'temp' sensor.

    Each row is a one-element list holding a SensorEvent whose value runs
    1, 2, 3 and whose timestamps are roughly one second apart (the real
    sleep keeps whole-second timestamps distinct).

    Fixes over the original: removed the dead local ``dt = ts`` (never
    used) and renamed the local ``csv`` which shadowed the stdlib
    ``csv`` module.
    """
    sid = 'temp'
    rows = []
    for val in range(1, 4):
        ts = time.mktime(datetime.datetime.now().timetuple())
        rows.append([SensorEvent(ts=ts, sensor_id=sid, val=val)])
        time.sleep(1)
    return rows
def test_bokeh_output():
    """Stream sensor values into a BokehStreamer on a periodic schedule.

    NOTE(review): this is written as a plain function but calls
    ``self.assertTrue`` and references an undefined name ``vo`` —
    presumably copied from a unittest method; as written, reaching the
    assertion would raise NameError.  Also ``scheduler.run_forever()``
    normally does not return, so the lines after it look unreachable —
    confirm the intended behavior.
    """
    loop = asyncio.get_event_loop()
    s = ValueListSensor(1, value_stream)
    p = SensorPub(s)
    b = BokehStreamer([SensorEvent(ts=0, val=10, sensor_id="temp")],
                      io_loop=loop)
    p.subscribe(b)
    scheduler = Scheduler(loop)
    scheduler.schedule_periodic(p, 0.5)  # sample twice every second
    scheduler.run_forever()
    self.assertTrue(vo.completed,
                    "Schedule exited before validation observer completed")
    print("That's all folks")
def _observe(self):
    """Read one CSV row, convert it to a SensorEvent, and dispatch it.

    End of input closes the file and signals completion; a FatalError
    closes the stream and propagates; any other failure closes the file
    and is dispatched downstream as an error.
    """
    try:
        row = next(self.reader)
        self._dispatch_next(
            SensorEvent(ts=float(row[0]), sensor_id=row[1],
                        val=float(row[2])))
    except StopIteration:
        # Input exhausted: clean up and tell subscribers we are done.
        self.file.close()
        self._dispatch_completed()
    except FatalError:
        self._close()
        raise
    except Exception as e:
        self.file.close()
        self._dispatch_error(e)
async def sample_and_process(sensor, mqtt_writer, xducer):
    """Take one sample, transduce it, and forward any emitted event.

    On sensor exhaustion (StopIteration): flush the transducer, send any
    final event, disconnect the writer, and return False.  Otherwise:
    log the raw event to CSV, step the transducer, send its output (if
    any) over MQTT, and return True.
    """
    try:
        sample = sensor.sample()
    except StopIteration:
        # Sensor is done: flush the transducer and shut the writer down.
        final_event = xducer.complete()
        if final_event:
            await mqtt_writer.send(final_event)
        print("disconnecting")
        await mqtt_writer.disconnect()
        return False
    raw_event = SensorEvent(sensor_id=sensor.sensor_id, ts=time.time(),
                            val=sample)
    csv_writer(raw_event)
    smoothed = xducer.step(raw_event)
    if smoothed:
        await mqtt_writer.send(smoothed)
    return True
def step(self, event):
    """Fold one event into a rolling time-window average.

    Walks the saved history, averaging in every past value whose age
    (relative to *event*) is within ``history_interval``; entries older
    than that are trimmed.  Appends *event* to the history and returns
    a SensorEvent carrying the mean.
    """
    running_total = event.val  # always include the latest
    count = 1
    trim_to = 0
    for idx, past in enumerate(self.history):
        if (event.ts - past.ts) < self.history_interval:
            running_total += past.val
            count += 1
        else:
            # stale timestamp: the live window starts after this entry
            trim_to = idx + 1
    if trim_to > 0:
        self.history = self.history[trim_to:]
    self.history.append(event)
    return SensorEvent(ts=event.ts, sensor_id=event.sensor_id,
                       val=running_total / count)
def sample_and_process(sensor, mqtt_writer, xducer, completion_cb, error_cb):
    """Callback-style version of the sample/transduce/send step.

    Takes one sample from *sensor*.  On exhaustion (StopIteration):
    flushes *xducer*, sends any final event, disconnects the writer,
    and finally invokes ``completion_cb(False)``.  Otherwise: logs the
    raw event to CSV, steps the transducer, sends its output (if any),
    and invokes ``completion_cb(True)``.  *error_cb* is passed to every
    writer call as the failure continuation.
    """
    try:
        sample = sensor.sample()
    except StopIteration:
        # Sensor exhausted: flush the transducer before disconnecting.
        final_event = xducer.complete()
        if final_event:
            # Chain: send final event -> disconnect -> completion_cb(False)
            mqtt_writer.send(final_event,
                             lambda: mqtt_writer.disconnect(
                                 lambda: completion_cb(False), error_cb),
                             error_cb)
        else:
            # Nothing left to send; just disconnect and signal completion.
            mqtt_writer.disconnect(lambda: completion_cb(False), error_cb)
        return
    event = SensorEvent(sensor_id=sensor.sensor_id, ts=time.time(), val=sample)
    csv_writer(event)
    median_event = xducer.step(event)
    if median_event:
        mqtt_writer.send(median_event, lambda: completion_cb(True), error_cb)
    else:
        # Transducer emitted nothing this step; still report success.
        completion_cb(True)
def _replace_event(self, state, new_event, old_event, total_events):
    """Slide a running-sum window by one event.

    Drops *old_event*'s value from the running sum *state*, adds
    *new_event*'s, and returns ``(mean_event, updated_sum)`` where
    ``mean_event`` is a SensorEvent carrying the window mean.
    """
    updated_sum = state - old_event.val + new_event.val
    mean_event = SensorEvent(sensor_id=new_event.sensor_id,
                             ts=new_event.ts,
                             val=updated_sum / total_events)
    return (mean_event, updated_sum)
def _make_event(self, val):
    """Wrap *val* in a SensorEvent stamped 'Controller' at the current time."""
    now = time.time()
    return SensorEvent(val=val, ts=now, sensor_id='Controller')
os.remove(tf.name) # data for rollover test ROLLING_FILE1 = 'dining-room-2015-01-01.csv' ROLLING_FILE2 = 'dining-room-2015-01-02.csv' FILES = [ROLLING_FILE1, ROLLING_FILE2] def make_ts(day, hr, minute): return (datetime.datetime(2015, 1, day, hr, minute) - datetime.datetime(1970, 1, 1)).total_seconds() EVENTS = [ SensorEvent('dining-room', make_ts(1, 11, 1), 1), SensorEvent('dining-room', make_ts(1, 11, 2), 2), SensorEvent('dining-room', make_ts(2, 11, 1), 3), SensorEvent('dining-room', make_ts(2, 11, 2), 4) ] # data for dispatch test sensor_ids = ['dining-room', 'living-room'] ROLLING_FILE3 = 'living-room-2015-01-01.csv' ROLLING_FILE4 = 'living-room-2015-01-02.csv' FILES2 = [ROLLING_FILE1, ROLLING_FILE2, ROLLING_FILE3, ROLLING_FILE4] EVENTS2 = [ SensorEvent('dining-room', make_ts(1, 11, 1), 1), SensorEvent('living-room', make_ts(1, 11, 2), 2), SensorEvent('living-room', make_ts(2, 11, 1), 3), SensorEvent('dining-room', make_ts(2, 11, 2), 4)
def msg_to_event(msg):
    """Convert a (sensor_id, ts, val) message triple into a SensorEvent."""
    sensor_id = msg[0]
    ts = msg[1]
    val = msg[2]
    return SensorEvent(sensor_id=sensor_id, ts=ts, val=val)
def row_to_event(self, row):
    """Convert a database row (datetime, sensor_id, val) into a SensorEvent.

    The first column is a datetime object; it is converted to a POSIX
    timestamp for the event's ``ts`` field.
    """
    assert len(row) == 3, "Expecting 3 elements, got '%s'" % row.__repr__()
    dt, sensor_id, val = row
    return SensorEvent(ts=dt.timestamp(), sensor_id=sensor_id, val=val)