def _loop(msg_queue, response_queue, channels, archive_name):
    """Main server loop. Intended to be a Process target (and private to
    this module). Accepts messages through its mailbox queue, and takes the
    appropriate action based on the command and parameters contained within
    the message.

    Messages are expected to be tuples of (command, params). The loop runs
    until an MSG_EXIT command is received.

    Parameters
    ----------
    msg_queue : Queue
        Used for receiving inter-process communication.
    response_queue : Queue
        Used for pushing responses
    channels : list of str
        list of channel names in the underlying data table. Any records
        written to the buffer are expected to have an entry for each channel.
    archive_name : str
        sqlite database name
    """
    buf = Buffer(channels=channels, archive_name=archive_name)
    while True:
        # Messages should be tuples with the structure:
        # (command, params)
        msg = msg_queue.get()
        command, params = msg

        if command == MSG_EXIT:
            # params is a bool: whether to delete the sqlite archive.
            buf.cleanup(delete_archive=params)
            response_queue.put(('exit', 'ok'))
            break
        elif command == MSG_PUT:
            # params is the record to put; no response is sent for writes.
            buf.append(params)
        elif command == MSG_GET_ALL:
            response_queue.put(buf.all())
        elif command == MSG_COUNT:
            response_queue.put(len(buf))
        elif command == MSG_QUERY_SLICE:
            # params is (row_start, row_end, field)
            row_start, row_end, field = params
            log.debug("Sending query: %s", (row_start, row_end, field))
            response_queue.put(buf.query(row_start, row_end, field))
        elif command == MSG_QUERY:
            # Generic query; params is (filters, ordering, max_results)
            filters, ordering, max_results = params
            response_queue.put(buf.query_data(filters, ordering, max_results))
        elif command == MSG_STARTED:
            # Liveness check so callers can confirm the process is up.
            response_queue.put(('started', 'ok'))
        elif command == MSG_DUMP_RAW_DATA:
            buf.dump_raw_data(*params)
            response_queue.put(('raw_data', 'ok'))
        else:
            log.debug("Error; message not understood: %s", msg)
def test_query_before_flush(self):
    """Records must be queryable even before chunksize writes have
    occurred (i.e. before any data has been flushed to the archive)."""
    total_records = 1000
    n_channels = 25
    names = [f"ch{c}" for c in range(n_channels)]

    buf = Buffer(channels=names, chunksize=10000)
    for index, data in enumerate(_mockdata(total_records, n_channels)):
        buf.append(Record(data, float(index), None))

    rows = buf.query(start=buf.start_time, field='timestamp')
    self.assertEqual(len(rows), total_records)
    self.assertEqual(len(buf.all()), total_records)
    buf.cleanup()