def test_has_json_representation(self):
    # create an input stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # create an output stream
    dest = DataStream(id=1, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.UINT16)
    dest.elements = [Element(name="e%d" % x, index=x,
                             display_type=Element.DISPLAYTYPE.EVENT)
                     for x in range(5)]
    m = module.from_config(self.config)
    m.inputs = {'input': src}
    m.outputs = {'output': dest}
    result = m.to_json()
    # make sure basic attributes are in the output
    self.assertEqual(result['name'], 'test')
    self.assertEqual(result['exec_cmd'], '/bin/runit.sh')
    # make sure inputs and outputs are included as (name: id) pairs
    self.assertEqual(result['inputs']['input'], 0)
    self.assertEqual(result['outputs']['output'], 1)
def test_creates_nilmdb_stream_if_necessary(self):
    source_server = FakeJoule()
    nilmdb_msgs = multiprocessing.Queue()
    dest_url = self._start_nilmdb(nilmdb_msgs)
    # create just the source stream
    src_data = create_source_data(source_server)
    src_stream = DataStream(id=1, name="dest", keep_us=100,
                            datatype=DataStream.DATATYPE.FLOAT32)
    src_stream.elements = [Element(name="e%d" % x, index=x,
                                   display_type=Element.DISPLAYTYPE.CONTINUOUS)
                           for x in range(3)]
    source_server.add_stream('/test/destination', src_stream,
                             StreamInfo(None, None, 0), None)
    self.start_server(source_server)
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'copy', '-d', dest_url,
                                  '/test/source', '/test/destination'],
                           catch_exceptions=False)
    _print_result_on_error(result)
    # expect a stream create call
    nilmdb_call = nilmdb_msgs.get()
    self.assertEqual('stream_create', nilmdb_call['action'])
    self.assertEqual({'path': '/test/destination', 'layout': 'float32_3'},
                     nilmdb_call['params'])
    # expect a metadata call
    nilmdb_call = nilmdb_msgs.get()
    self.assertEqual('set_metadata', nilmdb_call['action'])
    self.assertEqual('/test/destination', nilmdb_call['params']['path'])
    self.assertEqual('config_key__',
                     list(json.loads(nilmdb_call['params']['data']).keys())[0])
    # expect a data transfer call
    nilmdb_call = nilmdb_msgs.get()
    self.assertEqual('stream_insert', nilmdb_call['action'])
    data = nilmdb_call['data']
    np.testing.assert_array_equal(src_data, data)
    self.assertEqual(0, result.exit_code)
    self._stop_nilmdb()
    self.stop_server()
    del nilmdb_msgs
def test_copies_data_to_nilmdb(self):
    source_server = FakeJoule()
    nilmdb_msgs = multiprocessing.Queue()
    dest_url = self._start_nilmdb(nilmdb_msgs)
    # create the source and destination streams
    src_data = create_source_data(source_server)
    src_stream = DataStream(id=1, name="dest", keep_us=100,
                            datatype=DataStream.DATATYPE.FLOAT32)
    src_stream.elements = [Element(name="e%d" % x, index=x,
                                   display_type=Element.DISPLAYTYPE.CONTINUOUS)
                           for x in range(3)]
    source_server.add_stream('/test/destination', src_stream,
                             StreamInfo(None, None, 0), None)
    self.start_server(source_server)
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'copy', '-d', dest_url,
                                  '/test/source', '/existing/float32_3'],
                           input='y\n', catch_exceptions=True)
    _print_result_on_error(result)
    # expect a data transfer call
    nilmdb_call = nilmdb_msgs.get()
    self.assertEqual('stream_insert', nilmdb_call['action'])
    data = nilmdb_call['data']
    np.testing.assert_array_equal(src_data, data)
    self.assertEqual(0, result.exit_code)
    self._stop_nilmdb()
    self.stop_server()
    del nilmdb_msgs
def test_when_destination_is_invalid(self):
    server = FakeJoule()
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # source has 4 rows of data
    src_data = helpers.create_data(src.layout, length=4)
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, np.ndarray([]))
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'copy', '/test/source', 'badpath'])
    self.assertTrue('Error' in result.output and 'destination' in result.output)
    self.assertEqual(result.exit_code, 1)
    self.stop_server()
def create_source_data(server, no_intervals=False):
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # source has 100 rows of data in four intervals
    src_data = helpers.create_data(src.layout, length=100,
                                   start=1548353881 * 1e6, step=1e6)
    ts = src_data['timestamp']
    if no_intervals:
        intervals = []
        src_info = StreamInfo(None, None, 0)
    else:
        intervals = [[ts[0], ts[24]],
                     [ts[25], ts[49]],
                     [ts[50], ts[74]],
                     [ts[75], ts[99]]]
        src_info = StreamInfo(intervals[0][0], intervals[-1][1], len(src_data))
    server.add_stream('/test/source', src, src_info, src_data, intervals)
    return intervals
def create_source_data(server):
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # source has 100 rows of data in four intervals between [0, 100]
    src_data = helpers.create_data(src.layout, length=100, start=0, step=1)
    # insert the interval tokens
    pipe_data = np.hstack((src_data[:25],
                           pipes.interval_token(src.layout),
                           src_data[25:50],
                           pipes.interval_token(src.layout),
                           src_data[50:75],
                           pipes.interval_token(src.layout),
                           src_data[75:]))
    ts = src_data['timestamp']
    intervals = [[ts[0], ts[24]],
                 [ts[25], ts[49]],
                 [ts[50], ts[74]],
                 [ts[75], ts[99]]]
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, pipe_data, intervals)
    return src_data
async def test_stream_create(self):
    db: Session = self.app["db"]
    new_stream = DataStream(name="test", datatype=DataStream.DATATYPE.FLOAT32)
    new_stream.elements = [Element(name="e%d" % j, index=j,
                                   display_type=Element.DISPLAYTYPE.CONTINUOUS)
                           for j in range(3)]
    payload = {
        "dest_path": "/deep/new folder",
        "stream": new_stream.to_json()
    }
    resp = await self.client.post("/stream.json", json=payload)
    self.assertEqual(resp.status, 200)
    # check the stream was created correctly
    created_stream: DataStream = db.query(DataStream).filter_by(name="test").one()
    self.assertEqual(len(created_stream.elements), len(new_stream.elements))
    self.assertEqual(created_stream.folder.name, "new folder")
    # can create by dest_id as well
    folder1: Folder = db.query(Folder).filter_by(name="folder1").one()
    new_stream.name = "test2"
    payload = {
        "dest_id": folder1.id,
        "stream": new_stream.to_json()
    }
    resp = await self.client.post("/stream.json", json=payload)
    self.assertEqual(resp.status, 200)
    # check the stream was created correctly
    created_stream = db.query(DataStream).filter_by(name="test2").one()
    self.assertEqual(len(created_stream.elements), len(new_stream.elements))
    self.assertEqual(created_stream.folder.name, "folder1")
def test_does_not_copy_existing_data(self):
    server = FakeJoule()
    # create the source and destination streams
    src_data = create_source_data(server)
    # dest has the same intervals as source so nothing is copied
    ts = src_data['timestamp']
    intervals = server.streams['/test/source'].intervals
    dest = DataStream(id=1, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
    dest.elements = [Element(name="e%d" % x, index=x,
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for x in range(3)]
    server.add_stream('/test/destination', dest,
                      StreamInfo(int(ts[0]), int(ts[-1]), len(ts)),
                      src_data, intervals)
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'copy',
                                  '--start', str(ts[0]),
                                  '--end', str(ts[-1]),
                                  '/test/source', '/test/destination'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    # only the annotation get requests were made (twice for each interval:
    # once for the source and once for the destination)
    self.assertEqual(self.msgs.qsize(), len(intervals) * 2)
    self.stop_server()
def test_copies_all_data(self):
    server = FakeJoule()
    # create the source and destination streams
    src_data = create_source_data(server)
    # dest is empty
    dest = DataStream(id=1, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
    dest.elements = [Element(name="e%d" % x, index=x,
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for x in range(3)]
    server.add_stream('/test/destination', dest, StreamInfo(None, None, 0), None)
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'copy', '/test/source', '/test/destination'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    while self.msgs.empty():
        time.sleep(0.1)
        print("waiting...")
    mock_entry = self.msgs.get()
    np.testing.assert_array_equal(src_data, mock_entry.data)
    self.stop_server()
def test_datatype_mismatch(self):
    # the datatype of the file and the target stream must match
    server = FakeJoule()
    # create the target stream
    src = DataStream(id=0, name="dest", keep_us=100,
                     datatype=DataStream.DATATYPE.UINT16)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # the file data has a different datatype
    file_data = helpers.create_data('int16_3')
    src_info = StreamInfo(0, 0, 0, 0)
    server.add_stream('/test/dest', src, src_info, None)
    self.start_server(server)
    runner = CliRunner()
    with tempfile.NamedTemporaryFile() as data_file:
        write_hd5_data(data_file, file_data)
        result = runner.invoke(main, ['data', 'ingest', '--file', data_file.name])
        self.assertIn("datatype", result.output)
        self.assertNotEqual(result.exit_code, 0)
    self.stop_server()
def test_warn_on_different_elements(self):
    server = FakeJoule()
    create_source_data(server)
    # destination has the same layout but different element attributes
    dest = DataStream(id=1, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
    dest.elements = [Element(name="different%d" % x, index=x, units='other',
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for x in range(3)]
    server.add_stream('/test/destination', dest, StreamInfo(None, None, 0), None)
    self.start_server(server)
    runner = CliRunner()
    # does not copy without confirmation
    runner.invoke(main, ['data', 'copy', '/test/source', '/test/destination'])
    self.assertTrue(self.msgs.empty())
    # copies with confirmation (use a fresh event loop for the second invocation)
    loop = asyncio.new_event_loop()
    loop.set_debug(True)
    asyncio.set_event_loop(loop)
    result = runner.invoke(main, ['data', 'copy', '/test/source', '/test/destination'],
                           input='y\n')
    mock_entry = self.msgs.get()
    self.assertTrue(len(mock_entry.data) > 0)
    self.assertEqual(result.exit_code, 0)
    self.stop_server()
def test_confirms_data_removal(self):
    # if there is existing data in the target, confirm removal
    server = FakeJoule()
    # create the target stream
    src = DataStream(id=0, name="dest", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # the file overlaps the existing stream data:
    # File:       |--------|
    # DataStream:      |--------|
    src_data = helpers.create_data(src.layout, length=1000)
    src_info = StreamInfo(int(src_data['timestamp'][500]),
                          int(src_data['timestamp'][-1]),
                          500)
    server.add_stream('/test/dest', src, src_info, src_data[500:])
    self.start_server(server)
    runner = CliRunner()
    with tempfile.NamedTemporaryFile() as data_file:
        write_hd5_data(data_file, src_data[:750])
        result = runner.invoke(main, ['data', 'ingest', '--file', data_file.name],
                               input='N')
        _print_result_on_error(result)
        self.assertEqual(result.exit_code, 0)
        self.assertIn('Cancelled', result.output)
    self.stop_server()
def test_creates_stream_if_necessary(self):
    server = FakeJoule()
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # source has 4 rows of data
    src_data = helpers.create_data(src.layout, length=4)
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data,
                      [[src_info.start, src_info.end]])
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'copy', '/test/source', '/test/destination'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    while self.msgs.empty():
        time.sleep(0.1)
        print("waiting...")
    mock_entry = self.msgs.get()
    np.testing.assert_array_equal(src_data, mock_entry.data)
    self.stop_server()
def test_when_server_returns_error_code(self):
    server = FakeJoule()
    error_msg = "test error"
    error_code = 500
    server.response = error_msg
    server.stub_stream_destroy = True
    server.http_code = error_code
    # actually create a stream so the only stubbed API call is the delete
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    src_info = StreamInfo(0, 0, 0)
    server.add_stream('/folder/stream', src, src_info, None, [])
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main, ['stream', 'delete', '/folder/stream'],
                           input='y\n')
    self.assertIn('%d' % error_code, result.output)
    self.assertIn(error_msg, result.output)
    self.assertEqual(result.exit_code, 1)
    self.stop_server()
def test_ingests_data_to_empty_existing_stream(self):
    server = FakeJoule()
    # create an existing stream with no data
    src = DataStream(id=0, name="existing", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    src_data = helpers.create_data(src.layout, length=22000)
    src_info = StreamInfo(0, 0, 0, 0)
    server.add_stream('/test/existing', src, src_info, None)
    self.start_server(server)
    runner = CliRunner()
    with tempfile.NamedTemporaryFile() as data_file:
        write_hd5_data(data_file, src_data)
        result = runner.invoke(main, ['data', 'ingest',
                                      '--file', data_file.name,
                                      '--stream', '/test/existing'])
        _print_result_on_error(result)
        self.assertEqual(result.exit_code, 0)
    db_obj = self.msgs.get()
    np.testing.assert_array_equal(src_data, db_obj.data)
    # uses the stream parameter instead of the hd5 attrs
    self.assertEqual(db_obj.stream.name, 'existing')
    self.stop_server()
def test_relationships(self):
    # root
    #  -folder1
    #    -stream11 (4 elements)
    #  -folder2
    #    -stream21 (4 elements)
    #  -stream1 (1 element)
    #  -stream2 (1 element)
    stream11 = DataStream(name="stream11", datatype=DataStream.DATATYPE.FLOAT32)
    stream11.elements = [Element(name="e%d" % x,
                                 display_type=Element.DISPLAYTYPE.DISCRETE)
                         for x in range(4)]
    folder1 = Folder(name="folder1")
    folder1.data_streams.append(stream11)
    stream21 = DataStream(name="stream21", datatype=DataStream.DATATYPE.UINT8)
    stream21.elements = [Element(name="e%d" % x,
                                 display_type=Element.DISPLAYTYPE.CONTINUOUS)
                         for x in range(4)]
    folder2 = Folder(name="folder2")
    folder2.data_streams.append(stream21)
    stream1 = DataStream(name="stream1", datatype=DataStream.DATATYPE.INT8)
    stream1.elements.append(Element(name="e0"))
    stream2 = DataStream(name="stream2", datatype=DataStream.DATATYPE.UINT64)
    stream2.elements.append(Element(name="e0"))
    root = Folder(name="root")
    root.children = [folder1, folder2]
    root.data_streams = [stream1, stream2]
    # check downward navigation
    self.assertEqual(len(root.children[0].data_streams[0].elements), 4)
    # check upward navigation
    e = stream11.elements[-1]
    self.assertEqual(e.stream.folder.parent.name, 'root')
def create_source_data(server):
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # source has 100 rows of data in a single interval
    src_data = helpers.create_data(src.layout, length=100, start=0, step=1)
    ts = src_data['timestamp']
    intervals = [[ts[0], ts[99]]]
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data, intervals)
    return src_data
def test_reads_selected_elements_to_file(self):
    server = FakeJoule()
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.UINT16)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # source has 100 rows of data between [0, 100]
    src_data = helpers.create_data(src.layout)
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data)
    self.start_server(server)
    runner = CliRunner()
    # add in some extra parameters to make sure they are parsed
    with tempfile.NamedTemporaryFile() as data_file:
        result = runner.invoke(main, ['data', 'read', '/test/source',
                                      '--start', '0', '--end', '1 hour ago',
                                      '-i', '0,2',
                                      '--file', data_file.name])
        _print_result_on_error(result)
        self.assertEqual(result.exit_code, 0)
        h5_file = h5py.File(data_file.name, 'r')
        self.assertEqual(src_data['data'].dtype, h5_file['data'].dtype)
        self.assertEqual(h5_file['timestamp'].dtype, np.dtype('i8'))
        np.testing.assert_array_almost_equal(h5_file['data'],
                                             src_data['data'][:, [0, 2]])
        np.testing.assert_array_almost_equal(h5_file['timestamp'],
                                             src_data['timestamp'][:, None])
        h5_file.close()
    self.stop_server()
def test_when_server_returns_error_code(self):
    server = FakeJoule()
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # source has 200 rows of data between [0, 200] in two intervals
    src_data = np.hstack((helpers.create_data(src.decimated_layout,
                                              start=0, length=100, step=1),
                          pipes.interval_token(src.decimated_layout),
                          helpers.create_data(src.decimated_layout,
                                              start=100, length=100, step=1)))
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data)
    server.response = "test error"
    server.http_code = 500
    server.stub_data_read = True
    self.start_server(server)
    runner = CliRunner()
    with self.assertLogs(level=logging.ERROR):
        runner.invoke(main, ['data', 'read', '/test/source', '--start', 'now'])
    self.stop_server()
def test_incompatible_layouts(self):
    server = FakeJoule()
    create_source_data(server)
    # destination has five elements, the source has three
    dest = DataStream(id=1, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
    dest.elements = [Element(name="e%d" % x, index=x,
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for x in range(5)]
    server.add_stream('/test/destination', dest, StreamInfo(None, None, 0), None)
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'copy', '/test/source', '/test/destination'])
    self.assertIn('not compatible', result.output)
    self.assertEqual(result.exit_code, 1)
    self.stop_server()
def test_copies_new_data(self):
    server = FakeJoule()
    # create the source and destination streams
    src_data = create_source_data(server)
    # dest has a subset of the data
    dest = DataStream(id=1, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
    dest.elements = [Element(name="e%d" % x, index=x,
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for x in range(3)]
    # destination is missing the first interval, but it won't be
    # copied with the --new flag
    dest_interval = server.streams['/test/source'].intervals[1]
    dest_data = np.copy(src_data[dest_interval[0]:dest_interval[1]])
    server.add_stream('/test/destination', dest,
                      StreamInfo(int(dest_interval[0]), int(dest_interval[1]),
                                 len(dest_data)),
                      dest_data, [dest_interval])
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'copy', '--new',
                                  '/test/source', '/test/destination'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    while self.msgs.empty():
        time.sleep(0.1)
        print("waiting...")
    while not self.msgs.empty():
        msg = self.msgs.get()
        if type(msg) is MockDbEntry:
            print(msg)
    self.stop_server()
def test_when_source_is_empty(self):
    server = FakeJoule()
    # source has no data
    src_info = StreamInfo(None, None, 0)
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    server.add_stream('/test/source', src, src_info, np.ndarray([]))
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'copy', '/test/source', '/test/destination'])
    self.assertTrue('Error' in result.output and 'source' in result.output)
    self.assertEqual(result.exit_code, 1)
    self.stop_server()
def test_reads_selected_elements(self):
    server = FakeJoule()
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # source has 100 rows of data between [0, 100]
    src_data = helpers.create_data(src.layout)
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data)
    self.start_server(server)
    runner = CliRunner()
    # add in some extra parameters to make sure they are parsed
    result = runner.invoke(main, ['data', 'read', '/test/source',
                                  '-i', '1,0',
                                  '--start', '0', '--end', '1 hour ago'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    output = result.output.split('\n')
    for x in range(len(src_data)):
        row = src_data[x]
        expected = "%d %s" % (row['timestamp'],
                              ' '.join('%f' % val for val in row['data'][[1, 0]]))
        self.assertTrue(expected in output[x + 1])
    self.stop_server()
def test_reads_decimated_data(self):
    server = FakeJoule()
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # source has 200 rows of data between [0, 200] in two intervals
    src_data = np.hstack((helpers.create_data(src.decimated_layout,
                                              start=0, length=100, step=1),
                          pipes.interval_token(src.decimated_layout),
                          helpers.create_data(src.decimated_layout,
                                              start=100, length=100, step=1)))
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data)
    self.start_server(server)
    # mark the intervals and show the bounds
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'read', '/test/source',
                                  '--start', '0', '--end', '1 hour ago',
                                  '--max-rows', '28',
                                  '--mark-intervals', '--show-bounds'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    output = result.output.split('\n')
    for x in range(len(src_data)):
        row = src_data[x]
        if row == pipes.interval_token(src.decimated_layout):
            expected = '# interval break'
        else:
            expected = "%d %s" % (row['timestamp'],
                                  ' '.join('%f' % val for val in row['data']))
        self.assertTrue(expected in output[x + 1])
    # create a new event loop for the next run
    loop = asyncio.new_event_loop()
    loop.set_debug(True)
    asyncio.set_event_loop(loop)
    # do not mark the intervals and hide the bounds
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'read', '/test/source',
                                  '--start', '0', '--end', '1 hour ago',
                                  '--max-rows', '28'])
    self.assertEqual(result.exit_code, 0)
    output = result.output.split('\n')
    offset = 0
    for x in range(len(src_data)):
        row = src_data[x]
        if row == pipes.interval_token(src.decimated_layout):
            offset = 1
            continue
        expected = "%d %s" % (row['timestamp'],
                              ' '.join('%f' % val for val in row['data'][:3]))
        self.assertTrue(expected in output[x - offset + 1])
    self.stop_server()
async def test_stream_create(self):
    # must be json
    resp = await self.client.post("/stream.json", data={"bad_values": "not_json"})
    self.assertEqual(resp.status, 400)
    self.assertIn("json", await resp.text())
    # must specify a stream
    resp = await self.client.post("/stream.json", json={"dest_path": "/folder2/deeper"})
    self.assertEqual(resp.status, 400)
    # invalid dest_path
    resp = await self.client.post("/stream.json", json={"dest_path": "notapath"})
    self.assertEqual(resp.status, 400)
    # must specify a path
    new_stream = DataStream(name="test", datatype=DataStream.DATATYPE.FLOAT32)
    new_stream.elements = [Element(name="e%d" % j, index=j,
                                   display_type=Element.DISPLAYTYPE.CONTINUOUS)
                           for j in range(3)]
    resp = await self.client.post("/stream.json", json={"stream": new_stream.to_json()})
    self.assertEqual(resp.status, 400)
    # element names must be unique
    e1_name = new_stream.elements[1].name
    new_stream.elements[1].name = new_stream.elements[2].name
    resp = await self.client.post("/stream.json",
                                  json={"dest_path": "/folder2/deeper",
                                        "stream": new_stream.to_json()})
    self.assertEqual(resp.status, 400)
    self.assertIn("names must be unique", await resp.text())
    new_stream.elements[1].name = e1_name  # restore the original name
    # stream must have a unique name in the folder
    new_stream.name = "stream1"
    resp = await self.client.post("/stream.json",
                                  json={"stream": new_stream.to_json(),
                                        "dest_path": "/folder1"})
    self.assertEqual(resp.status, 400)
    self.assertIn("same name", await resp.text())
    # invalid dest_path
    resp = await self.client.post("/stream.json",
                                  json={"stream": new_stream.to_json(),
                                        "dest_path": "notapath"})
    self.assertEqual(resp.status, 400)
    # stream must have a name
    new_stream.name = ""
    resp = await self.client.post("/stream.json",
                                  json={"stream": new_stream.to_json(),
                                        "dest_path": "/a/valid/path"})
    self.assertEqual(resp.status, 400)
    self.assertIn("name", await resp.text())
    new_stream.name = "test"
    # stream must have at least one element
    new_stream.elements = []
    resp = await self.client.post("/stream.json",
                                  json={"stream": new_stream.to_json(),
                                        "dest_path": "/a/valid/path"})
    self.assertEqual(resp.status, 400)
    self.assertIn("element", await resp.text())
    # invalid stream json (test all the different exception paths)
    resp = await self.client.post("/stream.json",
                                  json={"dest_path": "/path/to",
                                        "stream": 'notjson'})
    self.assertEqual(resp.status, 400)
    self.assertIn("JSON", await resp.text())
    # invalid datatype value (stream_json avoids shadowing the json module)
    stream_json = new_stream.to_json()
    stream_json["datatype"] = "invalid"
    resp = await self.client.post("/stream.json",
                                  json={"dest_path": "/path/to",
                                        "stream": stream_json})
    self.assertEqual(resp.status, 400)
    self.assertIn("specification", await resp.text())
    self.assertIn("datatype", await resp.text())
    # missing datatype attribute
    del stream_json["datatype"]
    resp = await self.client.post("/stream.json",
                                  json={"dest_path": "/path/to",
                                        "stream": stream_json})
    self.assertEqual(resp.status, 400)
    self.assertIn("datatype", await resp.text())
    # incorrect stream format
    resp = await self.client.post("/stream.json",
                                  json={"path": "/path/to",
                                        "stream": '{"invalid": 2}'})
    self.assertEqual(resp.status, 400)
def test_merges_config_and_db_streams(self):
    """e2e stream configuration service test"""
    # /test/stream1:float32_3
    folder_test = Folder(name="test")
    stream1 = DataStream(name="stream1", keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
    stream1.elements = [Element(name="e%d" % x, index=x, default_min=1)
                        for x in range(3)]
    folder_test.data_streams.append(stream1)
    # /test/deeper/stream2: int8_2
    folder_deeper = Folder(name="deeper")
    stream2 = DataStream(name="stream2", datatype=DataStream.DATATYPE.INT8)
    stream2.elements = [Element(name="e%d" % x, index=x) for x in range(2)]
    folder_deeper.data_streams.append(stream2)
    folder_deeper.parent = folder_test
    # /test/deeper/stream3: int16_2
    stream3 = DataStream(name="stream3", datatype=DataStream.DATATYPE.INT16)
    stream3.elements = [Element(name="e%d" % x, index=x) for x in range(2)]
    folder_deeper.data_streams.append(stream3)
    root = Folder(name="root")
    root.children = [folder_test]
    self.db.add(root)
    self.db.commit()
    configs = [
        # /test/stream1:float32_3 <different element configs and keep>
        """
[Main]
name = stream1
path = /test
datatype = float32
keep = all

[Element1]
name = new_e1
display_type = discrete

[Element2]
name = new_e2
display_type = event

[Element3]
name = new_e3
default_min = -10
""",
        # /new/path/stream4:uint8_2 <a new stream>
        """
[Main]
name = stream4
path = /new/path
datatype = uint8

[Element1]
name = 1

[Element2]
name = 2
""",
        # /new/path/stream5:uint8_1 <a new stream>
        """
[Main]
name = stream5
path = /new/path
datatype = uint8

[Element1]
name = 1
""",
        # /test/deeper/stream2:float32_1 <conflicting layout>
        """
[Main]
name = stream2
path = /test/deeper
datatype = float32

[Element1]
name = 1
""",
        # /invalid path//invalid:uint8_1 <invalid config (ignored)>
        """
[Main]
name = invalid
path = /invalid path//
datatype = uint8
keep = all

[Element1]
name = e1
""",
    ]
    with tempfile.TemporaryDirectory() as conf_dir:
        # write the configs in 0.conf, 1.conf, ...
        i = 0
        for conf in configs:
            with open(os.path.join(conf_dir, "%d.conf" % i), 'w') as f:
                f.write(conf)
            i += 1
        with self.assertLogs(logger=logger, level=logging.ERROR) as logs:
            load_streams.run(conf_dir, self.db)
    # logs the bad path error
    self.assertRegex(logs.output[0], 'path')
    # logs the incompatible layout error
    self.assertRegex(logs.output[1], 'layout')
    # now check the database:
    # should have 5 streams
    self.assertEqual(self.db.query(DataStream).count(), 5)
    # and 10 elements (orphans eliminated)
    self.assertEqual(self.db.query(Element).count(), 10)
    # Check stream merging
    # stream1 should have a new keep value
    stream1: DataStream = self.db.query(DataStream).filter_by(name="stream1").one()
    self.assertEqual(stream1.keep_us, DataStream.KEEP_ALL)
    # its elements should have new attributes
    self.assertEqual(stream1.elements[0].name, 'new_e1')
    self.assertEqual(stream1.elements[0].display_type, Element.DISPLAYTYPE.DISCRETE)
    self.assertEqual(stream1.elements[0].default_min, None)
    self.assertEqual(stream1.elements[1].name, 'new_e2')
    self.assertEqual(stream1.elements[1].display_type, Element.DISPLAYTYPE.EVENT)
    self.assertEqual(stream1.elements[1].default_min, None)
    self.assertEqual(stream1.elements[2].name, 'new_e3')
    self.assertEqual(stream1.elements[2].default_min, -10)
    # Check unconfigured streams are unchanged
    # /test/deeper/stream2 should be the same
    stream2: DataStream = self.db.query(DataStream).filter_by(name="stream2").one()
    self.assertEqual(stream2.layout, 'int8_2')
    # /test/deeper/stream3 should be the same
    stream3: DataStream = self.db.query(DataStream).filter_by(name="stream3").one()
    self.assertEqual(stream3.layout, 'int16_2')
    # Check new streams are added
    stream4: DataStream = self.db.query(DataStream).filter_by(name="stream4").one()
    self.assertEqual(stream4.layout, 'uint8_2')
    # Check the folder structure
    # -root
    #   -test
    #     -[stream1]
    #     -deeper
    #       -[stream2]
    #       -[stream3]
    #   -new
    #     -path
    #       -[stream4]
    #       -[stream5]
    self.assertEqual(len(root.children), 2)
    for f in root.children:
        if f.name == 'test':
            self.assertEqual(len(f.data_streams), 1)
            self.assertEqual(f.data_streams[0].name, 'stream1')
            self.assertEqual(len(f.children), 1)
            deeper = f.children[0]
            self.assertEqual(deeper.name, "deeper")
            self.assertEqual(len(deeper.children), 0)
            self.assertEqual(len(deeper.data_streams), 2)
            self.assertEqual(deeper.data_streams[0].name, 'stream2')
            self.assertEqual(deeper.data_streams[1].name, 'stream3')
        elif f.name == 'new':
            self.assertEqual(len(f.data_streams), 0)
            self.assertEqual(len(f.children), 1)
            path = f.children[0]
            self.assertEqual(path.name, "path")
            self.assertEqual(len(path.children), 0)
            self.assertEqual(len(path.data_streams), 2)
            for stream in path.data_streams:
                self.assertTrue(stream.name in ['stream4', 'stream5'])
        else:
            self.fail("unexpected folder name: " + f.name)
def test_parses_configs(self):
    """e2e module configuration service test"""
    # /test/stream1:float32_3
    folder_test = Folder(name="test")
    stream1 = DataStream(name="stream1", keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
    stream1.elements = [Element(name="e%d" % x, index=x, default_min=1)
                        for x in range(3)]
    folder_test.data_streams.append(stream1)
    # /test/deeper/stream2: int8_2
    folder_deeper = Folder(name="deeper")
    stream2 = DataStream(name="stream2", datatype=DataStream.DATATYPE.INT8)
    stream2.elements = [Element(name="e%d" % x, index=x) for x in range(2)]
    folder_deeper.data_streams.append(stream2)
    folder_deeper.parent = folder_test
    root = Folder(name="root")
    root.children = [folder_test]
    self.db.add(root)
    self.db.commit()
    configs = [
        # writes to /test/stream1
        """
[Main]
name = module1
exec_cmd = runit.sh

[Arguments]
key = value

# no inputs
[Outputs]
raw = /test/stream1
""",
        # reads from /test/stream1, writes to /test/deeper/stream2 and /test/stream3
        """
[Main]
name = module2
exec_cmd = runit2.sh

[Inputs]
source = /test/stream1:float32[e0, e1, e2]

[Outputs]
sink1 = /test/deeper/stream2
sink2 = /test/stream3:uint8[x, y]
""",
        # ignored: unconfigured input
        """
[Main]
name = bad_module
exec_cmd = runit3.sh

[Inputs]
source = /missing/stream

# no outputs
""",
        # ignored: mismatched stream config
        """
[Main]
name = bad_module2
exec_cmd = runit4.sh

[Inputs]
source = /test/stream3:uint8[x,y,z]

[Outputs]
""",
    ]
    with tempfile.TemporaryDirectory() as conf_dir:
        # write the configs in 0.conf, 1.conf, ...
        i = 0
        for conf in configs:
            with open(os.path.join(conf_dir, "%d.conf" % i), 'w') as f:
                f.write(conf)
            i += 1
        with self.assertLogs(logger=logger, level=logging.ERROR) as logs:
            modules = load_modules.run(conf_dir, self.db)
    output = ' '.join(logs.output)
    # logs the missing stream configuration
    self.assertIn('/missing/stream', output)
    # logs the incompatible stream configuration
    self.assertIn('different elements', output)
    # now check the database:
    # should have three streams
    self.assertEqual(self.db.query(DataStream).count(), 3)
    # and two modules
    self.assertEqual(len(modules), 2)
    # module1 should have no inputs and one output
    m1: Module = [m for m in modules if m.name == "module1"][0]
    self.assertEqual(len(m1.inputs), 0)
    self.assertEqual(len(m1.outputs), 1)
    self.assertEqual(m1.outputs["raw"], stream1)
    # module2 should have one input and two outputs
    m2: Module = [m for m in modules if m.name == "module2"][0]
    self.assertEqual(len(m2.inputs), 1)
    self.assertEqual(len(m2.outputs), 2)
    self.assertEqual(m2.inputs["source"], stream1)
    self.assertEqual(m2.outputs['sink1'], stream2)
    # sink2 goes to a new stream
    stream3 = self.db.query(DataStream).filter_by(name="stream3").one()
    self.assertEqual(m2.outputs['sink2'], stream3)