def create_source_data(server, no_intervals=False):
    """Register a 100-row float32 stream at /test/source.

    The data spans four contiguous 25-row intervals starting at a fixed
    epoch unless *no_intervals* is set, in which case the stream is
    registered with no data.  Returns the interval list.
    """
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % idx, index=idx,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for idx in range(3)]
    # 100 rows at 1-second spacing beginning at a fixed timestamp (in us)
    src_data = helpers.create_data(src.layout, length=100,
                                   start=1548353881 * 1e6, step=1e6)
    ts = src_data['timestamp']
    if no_intervals:
        intervals = []
        src_info = StreamInfo(None, None, 0)
    else:
        # four contiguous 25-row intervals covering all of the data
        intervals = [[ts[a], ts[b]] for a, b in
                     ((0, 24), (25, 49), (50, 74), (75, 99))]
        src_info = StreamInfo(intervals[0][0], intervals[-1][1], len(src_data))
    server.add_stream('/test/source', src, src_info, src_data, intervals)
    return intervals
def create_source_data(server):
    """Register /test/source with 100 rows split into four intervals.

    The rows are handed to the server with interval tokens interleaved so
    the mock pipe replays the interval breaks.  Returns the raw row data
    (without tokens).
    """
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % idx, index=idx,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for idx in range(3)]
    # 100 rows of data between [0, 100]
    src_data = helpers.create_data(src.layout, length=100, start=0, step=1)
    # interleave interval tokens so the stream replays as four intervals
    token = pipes.interval_token(src.layout)
    pipe_data = np.hstack((src_data[:25], token,
                           src_data[25:50], token,
                           src_data[50:75], token,
                           src_data[75:]))
    ts = src_data['timestamp']
    intervals = [[ts[a], ts[b]] for a, b in
                 ((0, 24), (25, 49), (50, 74), (75, 99))]
    src_info = StreamInfo(int(ts[0]), int(ts[-1]), len(src_data))
    server.add_stream('/test/source', src, src_info, pipe_data, intervals)
    return src_data
def test_warn_on_different_elements(self):
    """Copying into a stream with different elements requires confirmation."""
    server = FakeJoule()
    create_source_data(server)
    # destination elements have different names and units than the source
    dest = DataStream(id=1, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
    dest.elements = [Element(name="different%d" % idx, index=idx, units='other',
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for idx in range(3)]
    server.add_stream('/test/destination', dest, StreamInfo(None, None, 0), None)
    self.start_server(server)
    runner = CliRunner()
    # without confirmation nothing is copied
    runner.invoke(main, ['data', 'copy', '/test/source', '/test/destination'])
    self.assertTrue(self.msgs.empty())
    # a fresh event loop is needed for the second CLI invocation
    loop = asyncio.new_event_loop()
    loop.set_debug(True)
    asyncio.set_event_loop(loop)
    # with confirmation the copy proceeds
    result = runner.invoke(main,
                           ['data', 'copy', '/test/source', '/test/destination'],
                           input='y\n')
    mock_entry = self.msgs.get()
    self.assertTrue(len(mock_entry.data) > 0)
    self.assertEqual(result.exit_code, 0)
    self.stop_server()
def test_ingests_data_to_empty_existing_stream(self):
    """Ingest writes file data into an existing stream that has no data."""
    server = FakeJoule()
    # an existing but empty destination stream
    existing = DataStream(id=0, name="existing", keep_us=100,
                          datatype=DataStream.DATATYPE.FLOAT32)
    existing.elements = [Element(name="e%d" % idx, index=idx,
                                 display_type=Element.DISPLAYTYPE.CONTINUOUS)
                         for idx in range(3)]
    src_data = helpers.create_data(existing.layout, length=22000)
    server.add_stream('/test/existing', existing, StreamInfo(0, 0, 0, 0), None)
    self.start_server(server)
    runner = CliRunner()
    with tempfile.NamedTemporaryFile() as data_file:
        write_hd5_data(data_file, src_data)
        result = runner.invoke(main, ['data', 'ingest',
                                      '--file', data_file.name,
                                      '--stream', '/test/existing'])
        _print_result_on_error(result)
        self.assertEqual(result.exit_code, 0)
        db_obj = self.msgs.get()
        np.testing.assert_array_equal(src_data, db_obj.data)
        # the --stream parameter overrides the hd5 attrs
        self.assertEqual(db_obj.stream.name, 'existing')
    self.stop_server()
def test_copies_all_data(self):
    """All source data is copied into an empty destination stream."""
    server = FakeJoule()
    # source stream populated by the helper; destination starts out empty
    src_data = create_source_data(server)
    dest = DataStream(id=1, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
    dest.elements = [Element(name="e%d" % idx, index=idx,
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for idx in range(3)]
    server.add_stream('/test/destination', dest, StreamInfo(None, None, 0), None)
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main,
                           ['data', 'copy', '/test/source', '/test/destination'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    # wait for the copied data to reach the mock server
    while self.msgs.empty():
        time.sleep(0.1)
        print("waiting...")
    mock_entry = self.msgs.get()
    np.testing.assert_array_equal(src_data, mock_entry.data)
    self.stop_server()
def test_datatype_mismatch(self):
    """The datatype of the file must match the target stream."""
    server = FakeJoule()
    # target stream is uint16...
    dest = DataStream(id=0, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.UINT16)
    dest.elements = [Element(name="e%d" % idx, index=idx,
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for idx in range(3)]
    # ...but the file holds int16 data
    file_data = helpers.create_data('int16_3')
    server.add_stream('/test/dest', dest, StreamInfo(0, 0, 0, 0), None)
    self.start_server(server)
    runner = CliRunner()
    with tempfile.NamedTemporaryFile() as data_file:
        write_hd5_data(data_file, file_data)
        result = runner.invoke(main,
                               ['data', 'ingest', '--file', data_file.name])
        self.assertIn("datatype", result.output)
        self.assertNotEqual(result.exit_code, 0)
    self.stop_server()
def test_creates_stream_if_necessary(self):
    """Copying to a non-existent destination creates the stream first."""
    server = FakeJoule()
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for x in range(3)]
    # source has 4 rows of data in a single interval
    src_data = helpers.create_data(src.layout, length=4)
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    # FIX: the stream was registered twice with identical arguments
    # (copy-paste duplicate); register it once
    server.add_stream('/test/source', src, src_info, src_data,
                      [[src_info.start, src_info.end]])
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main,
                           ['data', 'copy', '/test/source', '/test/destination'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    # wait for the copied data to arrive
    while self.msgs.empty():
        time.sleep(0.1)
        print("waiting...")
    mock_entry = self.msgs.get()
    np.testing.assert_array_equal(src_data, mock_entry.data)
    self.stop_server()
def test_confirms_data_removal(self):
    """Existing data in the target requires confirmation before removal."""
    server = FakeJoule()
    dest = DataStream(id=0, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
    dest.elements = [Element(name="e%d" % idx, index=idx,
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for idx in range(3)]
    src_data = helpers.create_data(dest.layout, length=1000)
    # File:       |----|
    # DataStream:    |-----|
    src_info = StreamInfo(int(src_data['timestamp'][500]),
                          int(src_data['timestamp'][-1]), 500)
    server.add_stream('/test/dest', dest, src_info, src_data[500:])
    self.start_server(server)
    runner = CliRunner()
    with tempfile.NamedTemporaryFile() as data_file:
        write_hd5_data(data_file, src_data[:750])
        # answer 'N' to the removal prompt
        result = runner.invoke(main,
                               ['data', 'ingest', '--file', data_file.name],
                               input='N')
        _print_result_on_error(result)
        self.assertEqual(result.exit_code, 0)
        self.assertIn('Cancelled', result.output)
    self.stop_server()
def test_when_server_returns_error_code(self):
    """Stream delete errors from the server are shown to the user."""
    server = FakeJoule()
    error_msg = "test error"
    error_code = 500
    server.response = error_msg
    server.stub_stream_destroy = True
    server.http_code = error_code
    # a real stream must exist so the stubbed API call is the delete one
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % idx, index=idx,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for idx in range(3)]
    server.add_stream('/folder/stream', src, StreamInfo(0, 0, 0), None, [])
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main, ['stream', 'delete', '/folder/stream'],
                           input='y\n')
    # both the status code and the message should appear in the output
    self.assertTrue('%d' % error_code in result.output)
    self.assertTrue(error_msg in result.output)
    self.assertEqual(result.exit_code, 1)
    self.stop_server()
def test_creates_nilmdb_stream_if_necessary(self):
    """Copying to a missing NilmDB path creates the stream and metadata."""
    source_server = FakeJoule()
    nilmdb_msgs = multiprocessing.Queue()
    dest_url = self._start_nilmdb(nilmdb_msgs)
    # only the source has data; the nilmdb destination must be created
    src_data = create_source_data(source_server)
    dest_stream = DataStream(id=1, name="dest", keep_us=100,
                             datatype=DataStream.DATATYPE.FLOAT32)
    dest_stream.elements = [Element(name="e%d" % idx, index=idx,
                                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
                            for idx in range(3)]
    source_server.add_stream('/test/destination', dest_stream,
                             StreamInfo(None, None, 0), None)
    self.start_server(source_server)
    runner = CliRunner()
    result = runner.invoke(main,
                           ['data', 'copy', '-d', dest_url,
                            '/test/source', '/test/destination'],
                           catch_exceptions=False)
    _print_result_on_error(result)
    # first a stream create call
    nilmdb_call = nilmdb_msgs.get()
    self.assertEqual('stream_create', nilmdb_call['action'])
    self.assertEqual({'path': '/test/destination', 'layout': 'float32_3'},
                     nilmdb_call['params'])
    # then a metadata call
    nilmdb_call = nilmdb_msgs.get()
    self.assertEqual('set_metadata', nilmdb_call['action'])
    self.assertEqual('/test/destination', nilmdb_call['params']['path'])
    self.assertEqual('config_key__',
                     list(json.loads(nilmdb_call['params']['data']).keys())[0])
    # finally the data transfer itself
    nilmdb_call = nilmdb_msgs.get()
    self.assertEqual('stream_insert', nilmdb_call['action'])
    np.testing.assert_array_equal(src_data, nilmdb_call['data'])
    self.assertEqual(0, result.exit_code)
    self._stop_nilmdb()
    self.stop_server()
    del nilmdb_msgs
def test_copies_data_to_nilmdb(self):
    """Data is copied from a Joule source into an existing NilmDB stream."""
    source_server = FakeJoule()
    nilmdb_msgs = multiprocessing.Queue()
    dest_url = self._start_nilmdb(nilmdb_msgs)
    # set up the source stream and a joule-side destination entry
    src_data = create_source_data(source_server)
    dest_stream = DataStream(id=1, name="dest", keep_us=100,
                             datatype=DataStream.DATATYPE.FLOAT32)
    dest_stream.elements = [Element(name="e%d" % idx, index=idx,
                                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
                            for idx in range(3)]
    source_server.add_stream('/test/destination', dest_stream,
                             StreamInfo(None, None, 0), None)
    self.start_server(source_server)
    runner = CliRunner()
    result = runner.invoke(main,
                           ['data', 'copy', '-d', dest_url,
                            '/test/source', '/existing/float32_3'],
                           input='y\n', catch_exceptions=True)
    _print_result_on_error(result)
    # expect a single data transfer call
    nilmdb_call = nilmdb_msgs.get()
    self.assertEqual('stream_insert', nilmdb_call['action'])
    np.testing.assert_array_equal(src_data, nilmdb_call['data'])
    self.assertEqual(0, result.exit_code)
    self._stop_nilmdb()
    self.stop_server()
    del nilmdb_msgs
def test_does_not_copy_existing_data(self):
    """No data is transferred when the destination already has it."""
    server = FakeJoule()
    # create the source and destination streams
    src_data = create_source_data(server)
    # dest has the same intervals as source so nothing is copied
    ts = src_data['timestamp']
    intervals = server.streams['/test/source'].intervals
    dest = DataStream(id=1, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
    dest.elements = [Element(name="e%d" % x, index=x,
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for x in range(3)]
    server.add_stream('/test/destination', dest,
                      StreamInfo(int(ts[0]), int(ts[-1]), len(ts)),
                      src_data, intervals)
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'copy',
                                  '--start', str(ts[0]),
                                  '--end', str(ts[-1]),
                                  '/test/source', '/test/destination'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    # only the annotation gets were issued (once per interval for src and dest).
    # FIX: this was assertTrue(qsize, msg) which treated the expected count as
    # the failure message and always passed; the intent is an equality check.
    self.assertEqual(self.msgs.qsize(), len(intervals) * 2)
    self.stop_server()
def test_when_destination_is_invalid(self):
    """Copying to a malformed destination path reports an error."""
    server = FakeJoule()
    # source stream with 4 rows of data
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % idx, index=idx,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for idx in range(3)]
    src_data = helpers.create_data(src.layout, length=4)
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, np.ndarray([]))
    self.start_server(server)
    runner = CliRunner()
    # 'badpath' is not a valid stream path
    result = runner.invoke(main, ['data', 'copy', '/test/source', 'badpath'])
    self.assertTrue('Error' in result.output and 'destination' in result.output)
    self.assertEqual(result.exit_code, 1)
    self.stop_server()
async def test_stream_list(self):
    """The folder listing JSON should mirror the database structure."""
    session: Session = self.app["db"]
    store: MockStore = self.app["data-store"]
    stream1: DataStream = session.query(DataStream).filter_by(name="stream1").one()
    info = StreamInfo(start=0, end=100, rows=200)
    store.set_info(stream1, info)
    resp = await self.client.request("GET", "/folders.json")
    actual = await resp.json()
    expected = folder.root(session).to_json({stream1.id: info})
    self.assertEqual(actual, expected)
def create_source_data(server):
    """Register /test/source with 100 rows in a single interval.

    Returns the generated row data.
    """
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % idx, index=idx,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for idx in range(3)]
    # 100 rows of data covered by one interval
    src_data = helpers.create_data(src.layout, length=100, start=0, step=1)
    ts = src_data['timestamp']
    intervals = [[ts[0], ts[99]]]
    src_info = StreamInfo(int(ts[0]), int(ts[-1]), len(src_data))
    server.add_stream('/test/source', src, src_info, src_data, intervals)
    return src_data
async def create_stream(self, request: web.Request):
    """Mock handler for the stream create endpoint."""
    if self.stub_stream_create:
        # stubbed: reply with the canned response and status
        return web.Response(text=self.response, status=self.http_code)
    body = await request.json()
    path = body['dest_path']
    # reject an empty destination path
    if path == '':
        return web.Response(text='invalid request', status=400)
    new_stream = data_stream.from_json(body['stream'])
    # give the stream a unique id
    new_stream.id = 150 if new_stream.id is None else new_stream.id + 100
    self.streams[path + '/' + new_stream.name] = MockDbEntry(
        new_stream, StreamInfo(None, None, None))
    return web.json_response(data=new_stream.to_json())
async def test_stream_info(self):
    """A stream can be queried by id or by path with identical results."""
    session: Session = self.app["db"]
    stream1: DataStream = session.query(DataStream).filter_by(name="stream1").one()
    store: MockStore = self.app["data-store"]
    info = StreamInfo(start=0, end=100, rows=200)
    store.set_info(stream1, info)
    # query by id
    resp = await self.client.request("GET", "/stream.json?id=%d" % stream1.id)
    actual = await resp.json()
    expected = stream1.to_json({stream1.id: info})
    self.assertEqual(actual, expected)
    # query by path
    resp = await self.client.request("GET", "/stream.json",
                                     params={'path': "/folder1/stream1"})
    actual = await resp.json()
    self.assertEqual(actual, expected)
def test_reads_selected_elements_to_file(self):
    """Selected elements are written to an hd5 file with correct dtypes."""
    server = FakeJoule()
    # uint16 source stream with three elements
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.UINT16)
    src.elements = [Element(name="e%d" % idx, index=idx,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for idx in range(3)]
    src_data = helpers.create_data(src.layout)
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data)
    self.start_server(server)
    runner = CliRunner()
    # extra parameters exercise the CLI argument parsing
    with tempfile.NamedTemporaryFile() as data_file:
        result = runner.invoke(main, ['data', 'read', '/test/source',
                                      '--start', '0', '--end', '1 hour ago',
                                      '-i', '0,2', '--file', data_file.name])
        _print_result_on_error(result)
        self.assertEqual(result.exit_code, 0)
        h5_file = h5py.File(data_file.name, 'r')
        self.assertEqual(src_data['data'].dtype, h5_file['data'].dtype)
        self.assertEqual(h5_file['timestamp'].dtype, np.dtype('i8'))
        # only elements 0 and 2 should be present
        np.testing.assert_array_almost_equal(h5_file['data'],
                                             src_data['data'][:, [0, 2]])
        np.testing.assert_array_almost_equal(h5_file['timestamp'],
                                             src_data['timestamp'][:, None])
        h5_file.close()
    self.stop_server()
def test_when_server_returns_error_code(self):
    """A failing data read endpoint produces a logged error."""
    server = FakeJoule()
    # source stream with 200 decimated rows in two intervals
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % idx, index=idx,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for idx in range(3)]
    src_data = np.hstack(
        (helpers.create_data(src.decimated_layout, start=0, length=100, step=1),
         pipes.interval_token(src.decimated_layout),
         helpers.create_data(src.decimated_layout, start=100, length=100, step=1)))
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data)
    # stub the read endpoint to fail
    server.response = "test error"
    server.http_code = 500
    server.stub_data_read = True
    self.start_server(server)
    runner = CliRunner()
    # the server error should be logged at ERROR level
    with self.assertLogs(level=logging.ERROR):
        runner.invoke(main, ['data', 'read', '/test/source', '--start', 'now'])
    self.stop_server()
def test_incompatible_layouts(self):
    """Copying between streams with different element counts fails."""
    server = FakeJoule()
    create_source_data(server)
    # destination has five elements while the source has three
    dest = DataStream(id=1, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
    dest.elements = [Element(name="e%d" % idx, index=idx,
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for idx in range(5)]
    server.add_stream('/test/destination', dest, StreamInfo(None, None, 0), None)
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main,
                           ['data', 'copy', '/test/source', '/test/destination'])
    self.assertIn('not compatible', result.output)
    self.assertEqual(result.exit_code, 1)
    self.stop_server()
def test_copies_new_data(self):
    """With --new, only data after the destination's end is copied."""
    server = FakeJoule()
    # create the source and destination streams
    src_data = create_source_data(server)
    dest = DataStream(id=1, name="dest", keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
    dest.elements = [Element(name="e%d" % x, index=x,
                             display_type=Element.DISPLAYTYPE.CONTINUOUS)
                     for x in range(3)]
    # destination is missing the first interval but this won't be copied
    # with the --new flag
    dest_interval = server.streams['/test/source'].intervals[1]
    dest_data = np.copy(src_data[dest_interval[0]:dest_interval[1]])
    server.add_stream('/test/destination', dest,
                      StreamInfo(int(dest_interval[0]), int(dest_interval[1]),
                                 len(dest_data)),
                      dest_data, [dest_interval])
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'copy', '--new',
                                  '/test/source', '/test/destination'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    # wait for the copy to reach the mock server
    while self.msgs.empty():
        time.sleep(0.1)
    # FIX: the original drained the queue and only printed the received
    # entries (leftover debugging); assert data entries actually arrived
    entries = []
    while not self.msgs.empty():
        msg = self.msgs.get()
        if type(msg) is MockDbEntry:
            entries.append(msg)
    self.assertTrue(len(entries) > 0)
    self.stop_server()
def test_when_source_is_empty(self):
    """Copying from a stream with no data reports an error."""
    server = FakeJoule()
    # the source stream exists but holds no data
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % idx, index=idx,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for idx in range(3)]
    server.add_stream('/test/source', src, StreamInfo(None, None, 0),
                      np.ndarray([]))
    self.start_server(server)
    runner = CliRunner()
    result = runner.invoke(main,
                           ['data', 'copy', '/test/source', '/test/destination'])
    self.assertTrue('Error' in result.output and 'source' in result.output)
    self.assertEqual(result.exit_code, 1)
    self.stop_server()
async def _retrieve_nilmdb_stream(
        server: str,
        path: str) -> Tuple[Optional[DataStream], Optional[StreamInfo]]:
    """Look up *path* on the NilmDB *server*.

    Returns a (stream, info) pair built from the stream's stored metadata
    and extended listing, or (None, None) if the path does not exist.
    Raises errors.ApiError on any other HTTP failure.
    """
    url = "{server}/stream/get_metadata".format(server=server)
    params = {"path": path, "key": 'config_key__'}
    async with aiohttp.ClientSession() as session:
        async with session.get(url, params=params) as resp:
            if resp.status == 404:
                return None, None
            if not resp.status == 200:
                # FIX: resp.text is a coroutine method in aiohttp; without
                # calling and awaiting it the message showed a bound method
                # repr instead of the response body
                raise errors.ApiError("[%s]: %s" % (server, await resp.text()))
            default_name = path.split("/")[-1]
            config_data = {'name': default_name}
            try:
                metadata = await resp.json()
                config_data = {
                    **config_data,
                    **(json.loads(metadata['config_key__']))
                }
            except (KeyError, ValueError):
                # missing or corrupt configuration data
                pass
        # now get the stream info data
        url = "{server}/stream/list".format(server=server)
        params = {"path": path, "extended": '1'}
        async with session.get(url, params=params) as resp:
            info = (await resp.json())[0]
            my_stream = data_stream.from_nilmdb_metadata(config_data, info[1])
            my_info = StreamInfo(
                start=info[2],
                end=info[3],
                rows=info[4],
                bytes=-1,  # signal that this field is invalid
                total_time=info[5])
            return my_stream, my_info
def test_reads_selected_elements(self):
    """Only the requested elements appear in the output, in order."""
    server = FakeJoule()
    # float32 source stream with three elements
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % idx, index=idx,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for idx in range(3)]
    src_data = helpers.create_data(src.layout)
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data)
    self.start_server(server)
    runner = CliRunner()
    # extra parameters exercise the CLI argument parsing
    result = runner.invoke(main, ['data', 'read', '/test/source',
                                  '-i', '1,0',
                                  '--start', '0', '--end', '1 hour ago'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    output = result.output.split('\n')
    # each row should show elements 1 and 0 in that order
    for row_idx, row in enumerate(src_data):
        expected = "%d %s" % (row['timestamp'],
                              ' '.join('%f' % v for v in row['data'][[1, 0]]))
        self.assertTrue(expected in output[row_idx + 1])
    self.stop_server()
def test_reads_decimated_data(self):
    """Decimated reads honor --mark-intervals / --show-bounds flags."""
    server = FakeJoule()
    # source stream with 200 decimated rows in two intervals
    src = DataStream(id=0, name="source", keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % idx, index=idx,
                            display_type=Element.DISPLAYTYPE.CONTINUOUS)
                    for idx in range(3)]
    src_data = np.hstack(
        (helpers.create_data(src.decimated_layout, start=0, length=100, step=1),
         pipes.interval_token(src.decimated_layout),
         helpers.create_data(src.decimated_layout, start=100, length=100, step=1)))
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data)
    self.start_server(server)
    # first pass: mark the intervals and show the bounds
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'read', '/test/source',
                                  '--start', '0', '--end', '1 hour ago',
                                  '--max-rows', '28',
                                  '--mark-intervals', '--show-bounds'])
    _print_result_on_error(result)
    self.assertEqual(result.exit_code, 0)
    output = result.output.split('\n')
    for idx in range(len(src_data)):
        row = src_data[idx]
        if row == pipes.interval_token(src.decimated_layout):
            expected = '# interval break'
        else:
            expected = "%d %s" % (row['timestamp'],
                                  ' '.join('%f' % v for v in row['data']))
        self.assertTrue(expected in output[idx + 1])
    # a new event loop is required for the next CLI invocation
    loop = asyncio.new_event_loop()
    loop.set_debug(True)
    asyncio.set_event_loop(loop)
    # second pass: do not mark the intervals and hide the bounds
    runner = CliRunner()
    result = runner.invoke(main, ['data', 'read', '/test/source',
                                  '--start', '0', '--end', '1 hour ago',
                                  '--max-rows', '28'])
    self.assertEqual(result.exit_code, 0)
    output = result.output.split('\n')
    offset = 0
    for idx in range(len(src_data)):
        row = src_data[idx]
        if row == pipes.interval_token(src.decimated_layout):
            # interval tokens are omitted, shifting subsequent rows up
            offset = 1
            continue
        expected = "%d %s" % (row['timestamp'],
                              ' '.join('%f' % v for v in row['data'][:3]))
        self.assertTrue(expected in output[idx - offset + 1])
    self.stop_server()