def test_query_data(self):
    """Verify filtered, ordered, and limited queries against the buffer."""
    record_count = 20
    channel_names = ["ch1", "ch2", "TRG"]
    trigger_col = -1
    buffer = Buffer(channels=channel_names)
    for seq, sample in enumerate(mock_data(record_count, len(channel_names))):
        # Trigger channel fires only for the second half of the records.
        sample[trigger_col] = 1.0 if seq >= 10 else 0.0
        buffer.append(Record(sample, float(seq), None))
    results = buffer.query_data(filters=[("TRG", ">", 0)],
                                ordering=("timestamp", "asc"),
                                max_results=1)
    self.assertEqual(len(results), 1, "Should have limited to max_results.")
    self.assertEqual(results[0].data[trigger_col], 1.0,
                     "Should get filtered data.")
    self.assertEqual(results[0].timestamp, 10.0, "Should get first instance")
    buffer.cleanup()
def _loop(msg_queue, response_queue, channels, archive_name):
    """Main server loop. Intended to be a Process target (and private to
    this module).

    Accepts messages through its mailbox queue, and takes the appropriate
    action based on the command and parameters contained within the
    message. Runs until an MSG_EXIT command is received.

    Parameters
    ----------
    msg_queue : Queue
        Used for receiving inter-process communication.
    response_queue : Queue
        Used for pushing responses
    channels : list of str
        list of channel names in the underlying data table. Any records
        written to the buffer are expected to have an entry for each
        channel.
    archive_name : str
        sqlite database name
    """
    # The buffer (and its sqlite archive) lives entirely inside this
    # process; all access is serialized through the message queue.
    buf = Buffer(channels=channels, archive_name=archive_name)
    while True:
        # Messages should be tuples with the structure:
        # (command, params)
        msg = msg_queue.get()  # blocks until a message arrives
        command, params = msg
        if command == MSG_EXIT:
            # params indicates whether to delete the sqlite archive.
            buf.cleanup(delete_archive=params)
            response_queue.put(('exit', 'ok'))
            break
        elif command == MSG_PUT:
            # params is the record to put; no response is sent for writes.
            buf.append(params)
        elif command == MSG_GET_ALL:
            response_queue.put(buf.all())
        elif command == MSG_COUNT:
            response_queue.put(len(buf))
        elif command == MSG_QUERY_SLICE:
            # Range query over a single field.
            row_start, row_end, field = params
            log.debug("Sending query: %s", (row_start, row_end, field))
            response_queue.put(buf.query(row_start, row_end, field))
        elif command == MSG_QUERY:
            # Generic query
            filters, ordering, max_results = params
            response_queue.put(buf.query_data(filters, ordering, max_results))
        elif command == MSG_STARTED:
            # Liveness handshake used by the parent process.
            response_queue.put(('started', 'ok'))
        elif command == MSG_DUMP_RAW_DATA:
            buf.dump_raw_data(*params)
            response_queue.put(('raw_data', 'ok'))
        else:
            # Unknown commands are logged and ignored (no response),
            # so a bad message cannot kill the server loop.
            log.debug("Error; message not understood: %s", msg)
def test_len(self):
    """Test buffer len."""
    # NOTE(review): a later method in this file defines the same name;
    # in a single class the later definition shadows this one.
    total = 1000
    num_channels = 25
    names = ["ch" + str(c) for c in range(num_channels)]
    buffer = Buffer(channels=names)
    for seq, sample in enumerate(_mockdata(total, num_channels)):
        buffer.append(Record(sample, float(seq), None))
    self.assertEqual(len(buffer), total)
    buffer.cleanup()
def test_len(self):
    """Test buffer len."""
    expected_count = 1000
    n_ch = 25
    labels = ["ch" + str(i) for i in range(n_ch)]
    store = Buffer(channels=labels)
    for seq, sample in enumerate(mock_data(expected_count, n_ch)):
        store.append(Record(sample, float(seq), None))
    self.assertEqual(len(store), expected_count)
    store.cleanup()
def test_query_with_invalid_filter_field(self):
    """Filtering on an unknown channel should raise an exception."""
    total = 20
    names = ["ch1", "ch2", "TRG"]
    buffer = Buffer(channels=names)
    for seq, sample in enumerate(_mockdata(total, len(names))):
        buffer.append(Record(sample, float(seq), None))
    # "ch3" is not among the configured channels.
    with pytest.raises(Exception):
        buffer.query_data(filters=[("ch3", ">", 0)])
    buffer.cleanup()
def test_query_with_invalid_filter_field(self):
    """Test query with invalid filter field."""
    count = 20
    chans = ["ch1", "ch2", "TRG"]
    n_ch = len(chans)
    store = Buffer(channels=chans)
    seq = 0
    for sample in mock_data(count, n_ch):
        store.append(Record(sample, float(seq), None))
        seq += 1
    # "ch3" does not exist in the channel list, so the query must fail.
    with pytest.raises(Exception):
        store.query_data(filters=[("ch3", ">", 0)])
    store.cleanup()
def test_query_with_invalid_order_direction(self):
    """Ordering with an unrecognized direction should raise."""
    total = 20
    names = ["ch1", "ch2", "TRG"]
    trigger_col = -1
    buffer = Buffer(channels=names)
    for seq, sample in enumerate(_mockdata(total, len(names))):
        sample[trigger_col] = 1.0 if seq >= 10 else 0.0
        buffer.append(Record(sample, float(seq), None))
    # Only the short forms (e.g. "asc"/"desc") are valid; "ascending" is not.
    with pytest.raises(Exception):
        buffer.query_data(ordering=("ch1", "ascending"))
    buffer.cleanup()
def test_query_with_invalid_order_direction(self):
    """Test query with invalid order direction"""
    count = 20
    chans = ["ch1", "ch2", "TRG"]
    trg_col = -1
    store = Buffer(channels=chans)
    for seq, sample in enumerate(mock_data(count, len(chans))):
        # Second half of the records gets the trigger value.
        sample[trg_col] = 1.0 if seq >= 10 else 0.0
        store.append(Record(sample, float(seq), None))
    # "ascending" is not an accepted direction keyword, so this must fail.
    with pytest.raises(Exception):
        store.query_data(ordering=("ch1", "ascending"))
    store.cleanup()
def test_query_before_flush(self):
    """If a query is made before chunksize records have been written, the
    data should still be available."""
    total = 1000
    n_ch = 25
    labels = ["ch" + str(c) for c in range(n_ch)]
    # chunksize exceeds the record count, so nothing is flushed to disk yet.
    buffer = Buffer(channels=labels, chunksize=10000)
    for seq, sample in enumerate(_mockdata(total, n_ch)):
        buffer.append(Record(sample, float(seq), None))
    fetched = buffer.query(start=buffer.start_time, field='timestamp')
    self.assertEqual(len(fetched), total)
    self.assertEqual(len(buffer.all()), total)
    buffer.cleanup()
def test_latest(self):
    """Test query for most recent items."""
    total = 1000
    tail = 100
    n_ch = 25
    buffer = Buffer(channels=["ch" + str(c) for c in range(n_ch)])
    expected = []
    for seq, sample in enumerate(_mockdata(total, n_ch)):
        ts = float(seq)
        if seq >= total - tail:
            # seq + 1 presumably matches the row's 1-based rowid — it is
            # included in what latest() returns.
            expected.append((sample, ts, seq + 1))
        buffer.append(Record(sample, ts, None))
    newest = buffer.latest(tail)
    # latest() yields newest-first, so compare against expected reversed.
    expected.reverse()
    for pos, item in enumerate(expected):
        self.assertEqual(item, newest[pos])
    buffer.cleanup()
def test_buffer(self):
    """Test Buffer functionality: start time, range queries (half-open on
    the end), and open-ended queries."""
    n_records = 15000
    channel_count = 25
    channels = ["ch" + str(c) for c in range(channel_count)]
    # chunksize < n_records, so at least one flush to the archive occurs.
    buf = Buffer(channels=channels, chunksize=10000)
    append_timer = _Timer()
    timevalues = {}
    for i, data in enumerate(mock_data(n_records, channel_count)):
        timestamp = float(i)
        if i % 1000 == 0:
            # Remember every 1000th record so queries can be spot-checked.
            timevalues[timestamp] = data
        with append_timer:
            buf.append(Record(data, timestamp, None))
    self.assertEqual(buf.start_time, 0.0)
    starttime = 0.0
    rows = buf.query(start=starttime, end=starttime + 1.0, field='timestamp')
    self.assertEqual(len(rows), 1, "Results should not include end value.")
    self.assertEqual(rows[0].timestamp, starttime)
    self.assertEqual(rows[0].data, timevalues[starttime])
    start_time = 1000.0
    end_time = 2000.0
    rows = buf.query(start=start_time, end=end_time, field='timestamp')
    # BUG FIX: the expected row count is the width of the half-open
    # [start_time, end_time) interval — one record per 1.0 timestamp step —
    # not the start_time value itself. The original
    # assertEqual(len(rows), start_time) only passed by numeric
    # coincidence (1000 == 1000.0).
    self.assertEqual(len(rows), int(end_time - start_time),
                     "Should return one row per timestamp in [start, end).")
    self.assertEqual(rows[0].data, timevalues[start_time])
    rows = buf.query(start=buf.start_time, field='timestamp')
    self.assertEqual(
        len(rows), n_records,
        ("Providing only the start should return " "the rest."))
    buf.cleanup()