def test_file_write_read(self):
    tf = NamedTemporaryFile(mode='w', delete=False)
    tf.close()
    try:
        sensor = make_test_output_thing(1, stop_after_events=NUM_EVENTS)
        capture = CaptureInputThing()
        sensor.connect(capture)
        sensor.csv_writer(tf.name)
        scheduler = Scheduler(asyncio.get_event_loop())
        scheduler.schedule_recurring(sensor)
        print("Writing sensor events to temp file")
        scheduler.run_forever()
        self.assertTrue(capture.completed, "CaptureInputThing did not complete")
        self.assertEqual(len(capture.events), NUM_EVENTS,
                         "number of events captured did not match generated events")
        reader = CsvReader(tf.name)
        vs = SensorEventValidationInputThing(capture.events, self)
        reader.connect(vs)
        scheduler.schedule_recurring(reader)
        print("reading sensor events back from temp file")
        scheduler.run_forever()
        self.assertTrue(vs.completed, "ValidationInputThing did not complete")
    finally:
        os.remove(tf.name)
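# CaptureInputThing is a test helper defined elsewhere in these suites. A
# minimal sketch of the idea, assuming the standard ThingFlow InputThing
# callbacks (on_next/on_completed/on_error); other test files use their own
# variants (e.g. with a `seq` attribute or an `expecting_error` flag), so the
# real helpers may differ in detail.
class CaptureInputThing:
    """Hypothetical helper: record every event received and note completion."""
    def __init__(self):
        self.events = []
        self.completed = False
        self.errored = False

    def on_next(self, event):
        self.events.append(event)

    def on_completed(self):
        self.completed = True

    def on_error(self, e):
        self.errored = True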
def setUp(self):
    # Creating a new event loop each test case does not seem to work.
    # I think it is due to hbmqtt not cleaning up some state in the asyncio
    # layer.
    #self.loop = asyncio.new_event_loop()
    self.loop = asyncio.get_event_loop()
    self.sched = Scheduler(self.loop)
def test_batching(self):
    """We write out a set of events from two simulated sensors using an odd
    batch size (3). We then read them back and verify that we got all the
    events.
    """
    sensor1 = make_test_output_thing_from_vallist(TEST_SENSOR1, VALUE_STREAM)
    sensor2 = make_test_output_thing_from_vallist(TEST_SENSOR2, VALUE_STREAM)
    writer = PredixWriter(PREDIX_INGEST_URL, PREDIX_ZONE_ID, PREDIX_TOKEN,
                          extractor=EventExtractor(attributes={'test': True}),
                          batch_size=3)
    sensor1.connect(writer)
    sensor2.connect(writer)
    scheduler = Scheduler(asyncio.get_event_loop())
    scheduler.schedule_periodic(sensor1, 0.5)
    scheduler.schedule_periodic(sensor2, 0.5)
    start_time = time.time()
    scheduler.run_forever()
    # Now we read the events back
    reader1 = PredixReader(PREDIX_QUERY_URL, PREDIX_ZONE_ID, PREDIX_TOKEN,
                           TEST_SENSOR1, start_time=start_time, one_shot=False)
    reader2 = PredixReader(PREDIX_QUERY_URL, PREDIX_ZONE_ID, PREDIX_TOKEN,
                           TEST_SENSOR2, start_time=start_time, one_shot=False)
    ti1 = TestInput(reader1, 'sensor-1')
    ti2 = TestInput(reader2, 'sensor-2')
    scheduler.schedule_periodic(reader1, 2)
    scheduler.schedule_periodic(reader2, 2)
    scheduler.run_forever()
    self.assertListEqual(VALUE_STREAM, ti1.values)
    self.assertListEqual(VALUE_STREAM, ti2.values)
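# TestInput is a helper defined alongside this case. A minimal sketch,
# assuming it connects itself to the reader and accumulates the event values
# it sees so the test can compare them with VALUE_STREAM; the real helper may
# also check sensor ids or timestamps.
class TestInput:
    def __init__(self, reader, name):
        self.name = name
        self.values = []
        reader.connect(self)

    def on_next(self, event):
        self.values.append(event.val)

    def on_completed(self):
        print("%s completed" % self.name)

    def on_error(self, e):
        print("%s got error: %s" % (self.name, e))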
def run(args, token):
    sensor1 = TestSensor.output_thing(TEST_SENSOR1, 5)
    writer = PredixWriter(args.ingest_url, args.predix_zone_id, token,
                          extractor=EventExtractor(attributes={'test': True}),
                          batch_size=3)
    sensor1.connect(writer)
    sensor1.connect(print)  # also print the event
    scheduler = Scheduler(asyncio.get_event_loop())
    scheduler.schedule_periodic(sensor1, 0.5)
    start_time = time.time()
    scheduler.run_forever()
    print("Reading back events")
    reader1 = PredixReader(args.query_url, args.predix_zone_id, token,
                           TEST_SENSOR1, start_time=start_time, one_shot=True)
    reader1.connect(print)
    scheduler.schedule_recurring(reader1)
    scheduler.run_forever()
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Distributed lux example, data capture process')
    parser.add_argument('-i', '--interval', type=float, default=5.0,
                        help="Sample interval in seconds")
    parser.add_argument('-t', '--threshold', type=float, default=25.0,
                        help="Threshold lux level above which light should be turned on")
    parser.add_argument('broker', metavar="BROKER", type=str,
                        help="hostname or ip address of mqtt broker")
    parsed_args = parser.parse_args(argv)
    (lux, led) = setup(parsed_args.broker, parsed_args.threshold)
    scheduler = Scheduler(asyncio.get_event_loop())
    stop = scheduler.schedule_periodic_on_separate_thread(lux, parsed_args.interval)
    print("starting run...")
    try:
        scheduler.run_forever()
    except KeyboardInterrupt:
        led.on_completed()
        stop()
    return 0
def test_tsl2591(self):
    import thingflow.sensors.rpi.lux_sensor
    sensor = SensorAsOutputThing(thingflow.sensors.rpi.lux_sensor.LuxSensor())
    s = Scheduler(asyncio.get_event_loop())
    stop = s.schedule_periodic(sensor, 1.0)
    StopAfterN(sensor, stop, N=4).output()
    s.run_forever()
def test_blocking_output_thing(self):
    o = BlockingOutputThing()
    o.output()
    scheduler = Scheduler(asyncio.get_event_loop())
    c = scheduler.schedule_periodic_on_separate_thread(o, 1)
    vs = ValidationInputThing([i + 1 for i in range(EVENTS)], self,
                              extract_value_fn=lambda v: v)
    o.connect(vs)
    o.connect(StopLoopAfter(EVENTS, c))
    o.print_downstream()
    scheduler.run_forever()
    print("that's it")
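# ValidationInputThing is a shared test helper used throughout these cases.
# A minimal sketch of the idea, assuming it checks each incoming event against
# an expected list via the unittest assertions on the test case it is given and
# records completion; the real helper may differ in detail.
class ValidationInputThing:
    def __init__(self, expected_values, test_case,
                 extract_value_fn=lambda event: event.val):
        self.expected_values = expected_values
        self.test_case = test_case
        self.extract_value_fn = extract_value_fn
        self.next_idx = 0
        self.completed = False

    def on_next(self, event):
        self.test_case.assertEqual(self.expected_values[self.next_idx],
                                   self.extract_value_fn(event))
        self.next_idx += 1

    def on_completed(self):
        self.completed = True

    def on_error(self, e):
        self.test_case.fail("Unexpected error: %s" % e)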
class TestCase(unittest.TestCase):
    def setUp(self):
        # Creating a new event loop each test case does not seem to work.
        # I think it is due to hbmqtt not cleaning up some state in the asyncio
        # layer.
        #self.loop = asyncio.new_event_loop()
        self.loop = asyncio.get_event_loop()
        self.sched = Scheduler(self.loop)

    def tearDown(self):
        pass
        #self.loop.stop()
        #self.loop.close()

    def test_client_only(self):
        SENSOR_ID = 'sensor-1'
        TOPIC = get_topic_name(self)
        sensor = SensorAsOutputThing(ValueListSensor(SENSOR_ID, VALUES))
        td = sensor.transduce(PeriodicMedianTransducer(period=3))
        qw = QueueWriter(td, URL, TOPIC, self.sched)
        qw.output()
        self.sched.schedule_periodic(sensor, 0.5)
        self.sched.run_forever()
        self.assertFalse(qw.has_pending_requests(),
                         "QueueWriter has pending requests: %s" % qw.dump_state())
        print("test_client_only completed")

    def send_and_recv_body(self, sleep_timeout):
        SENSOR_ID = 'sensor-1'
        TOPIC = get_topic_name(self)
        sensor = SensorAsOutputThing(ValueListSensor(SENSOR_ID, VALUES))
        td = sensor.transduce(PeriodicMedianTransducer(period=3))
        qw = QueueWriter(td, URL, TOPIC, self.sched)
        qw.output()
        qr = QueueReader(URL, TOPIC, self.sched, timeout=sleep_timeout)
        self.sched.schedule_periodic(sensor, 0.5)
        stop_qr = self.sched.schedule_on_main_event_loop(qr)
        vs = ValidateAndStopInputThing(EXPECTED, self, stop_qr)
        qr.select(msg_to_event).connect(vs)
        self.sched.run_forever()
        self.assertFalse(qw.has_pending_requests(),
                         "QueueWriter has pending requests: %s" % qw.dump_state())
        self.assertEqual(qr.state, QueueReader.FINAL_STATE)
        self.assertEqual(vs.next_idx, len(EXPECTED))
        print("send_and_recv_body(%s) completed" % sleep_timeout)

    def test_short_timeout(self):
        self.send_and_recv_body(0.1)

    def test_long_timeout(self):
        self.send_and_recv_body(3.0)
def test_case(self):
    """Just run the reader in its own event loop. We stop everything after
    4 events.
    """
    s = Scheduler(asyncio.get_event_loop())
    m = MQTTReader("localhost", topics=[('bogus/bogus', 0)],
                   mock_class=MockMQTTClient)
    m.output()
    c = s.schedule_on_private_event_loop(m)
    m.connect(StopLoopAfter(4, c))
    m.print_downstream()
    s.run_forever()
    print("that's it")
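# StopLoopAfter is a small helper used in several of these cases. A minimal
# sketch, assuming it counts the events it receives and invokes the stop thunk
# returned by the scheduler once the limit is reached; the real helper may
# differ in detail.
class StopLoopAfter:
    def __init__(self, limit, stop_thunk):
        self.limit = limit
        self.seen = 0
        self.stop_thunk = stop_thunk

    def on_next(self, event):
        self.seen += 1
        if self.seen >= self.limit:
            print("Stopping loop after %d events" % self.seen)
            self.stop_thunk()

    def on_completed(self):
        pass

    def on_error(self, e):
        pass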
def test_publish_and_subscribe(self):
    sensor = ValueListSensor(1, sensor_values)
    scheduler = Scheduler(asyncio.get_event_loop())
    pg = PostgresWriter(scheduler, self.connect_string, self.mapping)
    capture = CaptureInputThing()
    scheduler.schedule_sensor(sensor, 0.5, parallel(pg, output, capture))
    scheduler.run_forever()
    print("finished writing to the database")
    row_source = PostgresReader(self.connect_string, self.mapping)
    row_source.output()
    validate = SensorEventValidationInputThing(capture.seq, self)
    row_source.connect(validate)
    scheduler.schedule_recurring(row_source)
    scheduler.run_forever()
    self.assertTrue(validate.completed)
    print("finished reading rows")
def main():
    parser = argparse.ArgumentParser(
        description="Subscribe to the specified topic and write the resulting "
                    "messages to InfluxDB")
    parser.add_argument('--mqtt-host', type=str, default='localhost',
                        help="Hostname or IP address of MQTT broker (defaults to localhost)")
    parser.add_argument('--topic-name', type=str, default='sensor-data',
                        help="Topic for subscription (defaults to sensor-data)")
    parser.add_argument('--influx-host', type=str, default='localhost',
                        help="InfluxDB host (defaults to localhost)")
    parser.add_argument('--influx-username', type=str, default='root',
                        help="InfluxDB username (defaults to root)")
    parser.add_argument('--influx-password', type=str, default=None,
                        help="InfluxDB password (defaults to None)")
    parser.add_argument('--influx-database', type=str, default='sensor-data',
                        help="InfluxDB database (defaults to sensor-data)")
    parser.add_argument('--influx-measurement', type=str, default='lux',
                        help="InfluxDB measurement (defaults to lux)")
    args = parser.parse_args()
    mqtt = setup_flow(args)
    scheduler = Scheduler(asyncio.get_event_loop())
    stop = scheduler.schedule_on_private_event_loop(mqtt)
    print("Running main loop")
    try:
        scheduler.run_forever()
    except KeyboardInterrupt:
        print("Stopping...")
        stop()
    return 0
def main(argv=sys.argv[1:]):
    if len(argv) != 1:
        print("%s interval" % sys.argv[0])
        return 1
    interval = float(argv[0])
    print("%f seconds interval" % interval)
    lux1, lux2 = setup()
    scheduler = Scheduler(asyncio.get_event_loop())
    stop1 = scheduler.schedule_periodic(lux1, interval)
    stop2 = scheduler.schedule_periodic(lux2, interval)
    print("starting run...")
    try:
        scheduler.run_forever()
    except KeyboardInterrupt:
        stop1()
        stop2()
    return 0
def test_mqtt(self):
    loop = asyncio.get_event_loop()
    s = Scheduler(loop)
    sensor = make_test_output_thing_from_vallist(1, sensor_data)
    mqtt_writer = MQTTWriter('localhost', topics=[('bogus/bogus', 0)])
    sensor.to_json().connect(mqtt_writer)
    s.schedule_periodic(sensor, 0.5)
    mqtt_reader = MQTTReader("localhost", topics=[('bogus/bogus', 0)])
    vs = ValidationInputThing(sensor_data, self)
    mqtt_reader.take(5).select(mqtt_msg_to_unicode) \
               .from_json(constructor=SensorEvent).output().connect(vs)
    c = s.schedule_on_private_event_loop(mqtt_reader)
    stop = StopLoopAfter(5, c)
    mqtt_reader.connect(stop)
    mqtt_reader.print_downstream()
    sensor.print_downstream()
    s.run_forever()
    loop.stop()
    self.assertTrue(vs.completed)
    print("that's it")
def setup(host):
    mqtt = MQTTReader(host, topics=[('bogus/bogus', 2)])
    decoded = mqtt.select(lambda m: m.payload.decode("utf-8")) \
                  .from_json(constructor=SensorEvent)
    scheduler = Scheduler(asyncio.get_event_loop())
    decoded.connect(PostgresWriter(scheduler, connect_string, mapping))
    decoded.output()
    mqtt.print_downstream()
    return mqtt, scheduler
def main(argv=sys.argv[1:]):
    if len(argv) != 2:
        print("%s threshold interval" % sys.argv[0])
        return 1
    threshold = float(argv[0])
    interval = float(argv[1])
    print("%f seconds interval and an LED threshold of %f lux" %
          (interval, threshold))
    (lux, led) = setup(threshold)
    scheduler = Scheduler(asyncio.get_event_loop())
    stop = scheduler.schedule_periodic_on_separate_thread(lux, interval)
    print("starting run...")
    try:
        scheduler.run_forever()
    except KeyboardInterrupt:
        led.on_completed()
        stop()
    return 0
def test_function_filter(self):
    """Verify the function filter class."""
    s = ValueListSensor(1, value_stream)
    st = SensorAsOutputThing(s)
    captured_list_ref = [[]]
    got_completed_ref = [False]

    def on_next(self, x):
        captured_list_ref[0].append(x.val)
        self._dispatch_next(x)

    def on_completed(self):
        got_completed_ref[0] = True
        self._dispatch_completed()

    ff = FunctionFilter(st, on_next=on_next, on_completed=on_completed)
    vo = ValidationInputThing(value_stream, self.test_function_filter)
    ff.connect(vo)
    st.print_downstream()
    scheduler = Scheduler(asyncio.get_event_loop())
    scheduler.schedule_periodic(st, 0.5)  # sample twice every second
    scheduler.run_forever()
    self.assertTrue(vo.completed,
                    "Schedule exited before validation observer completed")
    self.assertEqual(value_stream, captured_list_ref[0])
    self.assertTrue(got_completed_ref[0])
    print("That's all folks")
def test_periodic(self):
    s = SensorAsOutputThing(RandomSensor(1))
    PrintAndDeschedule(s)
    scheduler = Scheduler(asyncio.get_event_loop())
    scheduler.schedule_periodic(s, 0.25)
    scheduler.run_forever()
    print("Exited successfully")
def test_dispatch(self):
    """Test a scenario where we dispatch to one of several writers
    depending on the sensor id.
    """
    def generator():
        for e in EVENTS2:
            yield e
    sensor = IterableAsOutputThing(generator(), name='sensor')
    dispatcher = sensor.dispatch(dispatch_rules)
    for s in sensor_ids:
        dispatcher.rolling_csv_writer('.', s, sub_port=s)
    dispatcher.connect(lambda x: self.assertTrue(False, "bad dispatch of %s" % x))
    scheduler = Scheduler(asyncio.get_event_loop())
    scheduler.schedule_recurring(sensor)
    scheduler.run_forever()
    for f in FILES2:
        self.assertTrue(os.path.exists(f), 'did not find file %s' % f)
        cnt = 0
        with open(f, 'r') as fobj:
            for line in fobj:
                cnt += 1
        self.assertEqual(2, cnt, "File %s did not have 2 lines" % f)
        print("found log file %s" % f)
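# dispatch_rules and sensor_ids are module-level fixtures defined elsewhere in
# this test file. A hedged sketch of the shape they might have, assuming
# dispatch() takes (predicate, port-name) pairs and routes events that match
# no rule to its default port; the ids and rule format below are illustrative
# assumptions, not the actual fixture values.
sensor_ids = ['dining-room', 'living-room']  # hypothetical sensor ids
dispatch_rules = [(lambda evt, sid=sid: evt.sensor_id == sid, sid)
                  for sid in sensor_ids]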
def test_function_filter_error_handling(self):
    """Verify the error handling functionality of the function filter. We
    do this by connecting two downstream paths to the sensor. The first
    includes a function filter that throws an error when it encounters a
    sensor reading of 120. This should disconnect the stream at that point.
    The second is a normal validation input thing. It is connected directly
    to the sensor, and thus should not see any errors.
    """
    s = ValueListSensor(1, value_stream)
    st = SensorAsOutputThing(s)
    captured_list_ref = [[]]
    got_completed_ref = [False]
    got_on_error_ref = [False]

    def on_next_throw_exc(self, x):
        if x.val == 120:
            raise Exception("expected exc")
        else:
            captured_list_ref[0].append(x.val)
            self._dispatch_next(x)

    def on_completed(self):
        got_completed_ref[0] = True
        self._dispatch_completed()

    def on_error(self, e):
        got_on_error_ref[0] = True
        self._dispatch_error(e)

    ff = FunctionFilter(st, on_next=on_next_throw_exc,
                        on_completed=on_completed, on_error=on_error)
    ct = CaptureInputThing(expecting_error=True)
    ff.map(lambda x: x.val).connect(ct)
    vo = ValidationInputThing(value_stream,
                              self.test_function_filter_error_handling)
    st.connect(vo)
    st.print_downstream()
    scheduler = Scheduler(asyncio.get_event_loop())
    scheduler.schedule_periodic(st, 0.5)  # sample twice every second
    scheduler.run_forever()
    self.assertTrue(vo.completed,
                    "Schedule exited before validation observer completed")
    self.assertFalse(ct.completed, "Capture thing should not have completed")
    self.assertTrue(ct.errored, "Capture thing should have seen an error")
    self.assertFalse(got_completed_ref[0])
    self.assertTrue(got_on_error_ref[0])
    self.assertEqual([20, 30, 100], ct.events, "Capture thing event mismatch")
    self.assertEqual([20, 30, 100], captured_list_ref[0],
                     "captured_list_ref mismatch")
    print("That's all folks")
def main_linear():
    obs_stream = from_iterable(iter([
        [[[1.0, 1.0], [2.0, 2.0]], [1.0, 2.0]],
        [[[6.0, 6.0], [9.0, 9.0]], [6.0, 9.0]],
    ]))
    pred_stream = from_iterable(iter([[3.0, 3.0]]))
    model = SGDLinearRegressionModel()
    obs_stream.connect(model, port_mapping=('default', 'train'))
    obs_stream.connect(print)
    pred_stream.connect(model, port_mapping=('default', 'observe'))
    model.connect(print, port_mapping=('predict', 'default'))
    scheduler = Scheduler(asyncio.get_event_loop())
    scheduler.schedule_periodic(obs_stream, 1)
    scheduler.schedule_periodic(pred_stream, 5)
    scheduler.run_forever()
def test(self):
    scheduler = Scheduler(asyncio.get_event_loop())
    sensor = make_test_output_thing_from_vallist(1, values)
    scheduler.schedule_periodic(sensor, 1)
    blocking_subscriber = TestInputThing(scheduler, values, self)
    sensor.connect(blocking_subscriber)
    scheduler.run_forever()
    self.assertTrue(blocking_subscriber.completed)
def main():
    parser = argparse.ArgumentParser(
        description="Subscribe to the specified topic and write the resulting "
                    "messages to a CSV file")
    parser.add_argument('topic_name', metavar='TOPIC_NAME', type=str,
                        help="Topic for subscription")
    parser.add_argument('csv_filename', metavar="CSV_FILENAME", type=str,
                        help="Name of CSV file to write with sensor data")
    args = parser.parse_args()
    mqtt = setup_flow(args.topic_name, args.csv_filename)
    scheduler = Scheduler(asyncio.get_event_loop())
    stop = scheduler.schedule_on_private_event_loop(mqtt)
    print("Running main loop")
    try:
        scheduler.run_forever()
    except KeyboardInterrupt:
        print("Stopping...")
        stop()
    return 0
def __init__(self):
    self.logger = Logger()
    self.config = Config()
    self.id = self.config.get('id')
    self.type = self.config.get('type')
    self.sensors = self.config.get('sensors')
    self.data = dict()
    self.data['id'] = self.id
    self.data['type'] = self.type
    self.data['model'] = self.config.get('model')
    self.data['implementation'] = dict()
    self.data['implementation']['name'] = sys.implementation.name
    self.data['implementation']['version'] = '.'.join(
        str(x) for x in sys.implementation.version)
    self.data['platform'] = sys.platform
    self.data['sensors'] = dict()
    self.data['os'] = self.getOsData()
    self.logger.notice("MicroNet Agent started on %s (%s)" % (self.id, self.type))
    self.conn = MQTTConnector(self.id)
    self.conn.set_last_will('micronet/devices/' + self.id + '/online', 'false')
    self.scheduler = Scheduler(asyncio.get_event_loop())
def test_gpio(self):
    import thingflow.adapters.rpi.gpio
    o = thingflow.adapters.rpi.gpio.GpioPinOut()
    sensor_thing = SensorAsOutputThing(ValueListSensor("sensor-1", values))
    sensor_thing.map(lambda evt: evt.val > 0).passthrough(output()).connect(o)
    s = Scheduler(asyncio.get_event_loop())
    s.schedule_periodic(sensor_thing, 1.0)
    s.run_forever()
def test_error_handling(self):
    """This is a non-fatal error, so we should just print the error and
    exit cleanly without propagating the exception.
    """
    s = IterableAsOutputThing(ErrorIterator(expected_stream))
    s.output()
    scheduler = Scheduler(asyncio.get_event_loop())
    scheduler.schedule_periodic(s, 0.5)  # sample twice every second
    s.print_downstream()
    scheduler.run_forever()
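# ErrorIterator is defined elsewhere in this test module. A minimal sketch of
# the idea, assuming it wraps an underlying sequence and raises an exception
# partway through so the non-fatal error path can be exercised; the real helper
# may raise at a different point or use a specific exception type.
class ErrorIterator:
    def __init__(self, values, raise_after=2):
        self.values = list(values)
        self.raise_after = raise_after  # hypothetical: fail after two values
        self.idx = 0

    def __iter__(self):
        return self

    def __next__(self):
        if self.idx >= self.raise_after:
            raise Exception("non-fatal error for testing")
        value = self.values[self.idx]
        self.idx += 1
        return value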
class TestCase(unittest.TestCase):
    def setUp(self):
        self.scheduler = Scheduler(asyncio.get_event_loop())
        self.sensor = ValueListSensor(1, value_stream)

    def test_sensor_event_sliding_window(self):
        vs = ValidationInputThing(mean_stream, self)
        self.scheduler.schedule_sensor(self.sensor, 0.1,
                                       transduce(SensorSlidingMean(4)),
                                       parallel(vs, output()))
        self.scheduler.run_forever()
        self.assertTrue(vs.completed)

    def test_periodic_median_transducer(self):
        vs = ValidationInputThing(periodic_median_stream, self)
        self.scheduler.schedule_sensor(self.sensor, 0.1,
                                       transduce(PeriodicMedianTransducer(3)),
                                       parallel(vs, output()))
        self.scheduler.run_forever()
        self.assertTrue(vs.completed)
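    # The schedule_sensor() calls above wrap the raw sensor and compose the
    # filter chain in one step. Roughly the same pipeline can be written with
    # the method-style API used elsewhere in these tests (SensorAsOutputThing
    # plus the transduce() method). A hedged sketch, reusing the names defined
    # in this module; it is illustrative, not an additional test from the
    # original suite.
    def test_periodic_median_transducer_method_style(self):
        sensor_thing = SensorAsOutputThing(self.sensor)
        td = sensor_thing.transduce(PeriodicMedianTransducer(3))
        vs = ValidationInputThing(periodic_median_stream, self)
        td.connect(vs)
        td.output()  # also print the transduced events
        self.scheduler.schedule_periodic(sensor_thing, 0.1)
        self.scheduler.run_forever()
        self.assertTrue(vs.completed)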
def test_blocking_sensor(self):
    s = BlockingSensor(1, stop_after=EVENTS)
    scheduler = Scheduler(asyncio.get_event_loop())
    scheduler.schedule_sensor_on_separate_thread(
        s, 1, passthrough(output()),
        ValidationInputThing([i + 1 for i in range(EVENTS)], self,
                             extract_value_fn=lambda v: v),
        make_event_fn=lambda s, v: v)
    scheduler.run_forever()
    print("that's it")
def test_schedule_sensor(self):
    """In this version, we pass the sensor directly to the scheduler and
    use a functional style to compose the filters.
    """
    s = ValueListSensor(1, value_stream)
    vo = ValidationInputThing(expected_stream, self.test_schedule_sensor)
    scheduler = Scheduler(asyncio.get_event_loop())
    scheduler.schedule_sensor(s, 0.5, where(predicate), passthrough(vo), output())
    scheduler.run_forever()
    self.assertTrue(vo.completed,
                    "Schedule exited before validation observer completed")
    print("That's all folks")
def test_passthrough_as_a_method(self):
    """Verify that, when passthrough is used as a method, it can still
    take thunks.
    """
    scheduler = Scheduler(asyncio.get_event_loop())
    luxpub = SensorAsOutputThing(ValueListSensor('lux-2', lux_data))
    vs1 = ValidationInputThing([450, 600], self)
    vs2 = ValidationInputThing(lux_data, self)
    luxpub.passthrough(compose(where(lambda evt: evt.val > 300), vs1)).connect(vs2)
    scheduler.schedule_periodic(luxpub, 0.5)
    scheduler.run_forever()
    self.assertTrue(vs1.completed)
    self.assertTrue(vs2.completed)
def test_rollover(self):
    def generator():
        for e in EVENTS:
            yield e
    sensor = IterableAsOutputThing(generator(), name='sensor')
    sensor.rolling_csv_writer('.', 'dining-room')
    vs = SensorEventValidationInputThing(EVENTS, self)
    sensor.connect(vs)
    scheduler = Scheduler(asyncio.get_event_loop())
    scheduler.schedule_recurring(sensor)
    scheduler.run_forever()
    for f in FILES:
        self.assertTrue(os.path.exists(f), 'did not find file %s' % f)
        print("found log file %s" % f)