def test_query_with_invalid_filter_field(self):
    """Querying on a channel name not present in the buffer should raise.

    NOTE(review): this duplicates a later definition of the same method name;
    within one class only the last definition is executed — confirm which
    version should be kept.
    """
    record_total = 20
    channels = ["ch1", "ch2", "TRG"]
    channel_count = len(channels)
    buffer_under_test = Buffer(channels=channels)
    for idx, row in enumerate(_mockdata(record_total, channel_count)):
        buffer_under_test.append(Record(row, float(idx), None))
    # "ch3" is not one of the configured channels.
    with pytest.raises(Exception):
        buffer_under_test.query_data(filters=[("ch3", ">", 0)])
    buffer_under_test.cleanup()
def test_query_with_invalid_filter_field(self):
    """A filter referencing an unknown channel must raise an exception."""
    total = 20
    channels = ["ch1", "ch2", "TRG"]
    width = len(channels)
    store = Buffer(channels=channels)
    for index, row in enumerate(mock_data(total, width)):
        store.append(Record(row, float(index), None))
    # "ch3" is not a configured channel, so the query is invalid.
    with pytest.raises(Exception):
        store.query_data(filters=[("ch3", ">", 0)])
    store.cleanup()
def test_query_data(self):
    """Filtered, ordered, limited queries return the expected first match."""
    total = 20
    channels = ["ch1", "ch2", "TRG"]
    trg_index = -1
    width = len(channels)
    store = Buffer(channels=channels)
    for index, row in enumerate(mock_data(total, width)):
        # Trigger channel fires only for the second half of the records.
        row[trg_index] = 1.0 if index >= 10 else 0.0
        store.append(Record(row, float(index), None))
    rows = store.query_data(
        filters=[("TRG", ">", 0)],
        ordering=("timestamp", "asc"),
        max_results=1,
    )
    self.assertEqual(len(rows), 1, "Should have limited to max_results.")
    self.assertEqual(rows[0].data[trg_index], 1.0, "Should get filtered data.")
    self.assertEqual(rows[0].timestamp, 10.0, "Should get first instance")
    store.cleanup()
def test_query_with_invalid_order_direction(self):
    """An unrecognized ordering direction must raise an exception.

    NOTE(review): this duplicates a later definition of the same method name;
    within one class only the last definition is executed — confirm which
    version should be kept.
    """
    record_total = 20
    channels = ["ch1", "ch2", "TRG"]
    trg_index = -1
    channel_count = len(channels)
    buffer_under_test = Buffer(channels=channels)
    for idx, row in enumerate(_mockdata(record_total, channel_count)):
        row[trg_index] = 1.0 if idx >= 10 else 0.0
        buffer_under_test.append(Record(row, float(idx), None))
    # "ascending" is not a valid direction token (expected e.g. "asc").
    with pytest.raises(Exception):
        buffer_under_test.query_data(ordering=("ch1", "ascending"))
    buffer_under_test.cleanup()
def test_query_with_invalid_order_direction(self):
    """Ordering with a direction other than the accepted tokens must raise."""
    total = 20
    channels = ["ch1", "ch2", "TRG"]
    trg_index = -1
    width = len(channels)
    store = Buffer(channels=channels)
    for index, row in enumerate(mock_data(total, width)):
        row[trg_index] = 1.0 if index >= 10 else 0.0
        store.append(Record(row, float(index), None))
    # "ascending" is not an accepted direction token (expected e.g. "asc").
    with pytest.raises(Exception):
        store.query_data(ordering=("ch1", "ascending"))
    store.cleanup()
def _loop(msg_queue, response_queue, channels, archive_name):
    """Main server loop. Intended to be a Process target (and private to
    this module).

    Accepts messages through its mailbox queue, and takes the appropriate
    action based on the command and parameters contained within the message.
    Runs until an MSG_EXIT command is received.

    Parameters
    ----------
    msg_queue : Queue
        Used for receiving inter-process communication.
    response_queue : Queue
        Used for pushing responses
    channels : list of str
        list of channel names in the underlying data table. Any records
        written to the buffer are expected to have an entry for each channel.
    archive_name : str
        sqlite database name
    """
    buf = Buffer(channels=channels, archive_name=archive_name)
    while True:
        # Messages should be tuples with the structure:
        # (command, params)
        msg = msg_queue.get()
        command, params = msg
        if command == MSG_EXIT:
            # params indicates whether the sqlite archive file is deleted.
            buf.cleanup(delete_archive=params)
            response_queue.put(('exit', 'ok'))
            break
        elif command == MSG_PUT:
            # params is the record to put
            buf.append(params)
        elif command == MSG_GET_ALL:
            response_queue.put(buf.all())
        elif command == MSG_COUNT:
            response_queue.put(len(buf))
        elif command == MSG_QUERY_SLICE:
            # params is (row_start, row_end, field).
            row_start, row_end, field = params
            log.debug("Sending query: %s", (row_start, row_end, field))
            response_queue.put(buf.query(row_start, row_end, field))
        elif command == MSG_QUERY:
            # Generic query
            filters, ordering, max_results = params
            response_queue.put(buf.query_data(filters, ordering, max_results))
        elif command == MSG_STARTED:
            response_queue.put(('started', 'ok'))
        elif command == MSG_DUMP_RAW_DATA:
            buf.dump_raw_data(*params)
            response_queue.put(('raw_data', 'ok'))
        else:
            # Unknown command: log and keep serving rather than crashing.
            log.debug("Error; message not understood: %s", msg)