Пример #1
0
def main():
    """Exercise two concurrent buffer servers with randomly generated data."""
    import numpy as np
    from bcipy.acquisition.record import Record
    import timeit

    total_rows = 1000
    n_channels = 25
    channel_names = ["ch" + str(ch) for ch in range(n_channels)]

    # Two independent servers, alternately written to below.
    servers = [start(channel_names, 'buffer1.db'),
               start(channel_names, 'buffer2.db')]

    begin = timeit.default_timer()
    for row in range(total_rows):
        values = [np.random.uniform(-1000, 1000) for _ in range(n_channels)]
        # Even rows go to the first server, odd rows to the second.
        append(servers[row % 2], Record(values, row, None))

    elapsed = timeit.default_timer() - begin

    print("Records inserted in buffer 1: {}".format(count(servers[0])))
    print("Records inserted in buffer 2: {}".format(count(servers[1])))

    print("Total insert time: " + str(elapsed))

    query_n = 5
    sample_rows = get_data(servers[0], 0, query_n)
    print("Sample records from buffer 1 (query < {}): {}".format(
        query_n, sample_rows))
    stop(servers[0])
    stop(servers[1])
Пример #2
0
    def test_query_data(self):
        """Querying with a filter, an ordering, and max_results should
        return only the single earliest matching record."""
        record_total = 20
        channels = ["ch1", "ch2", "TRG"]
        trg_index = -1

        buf = Buffer(channels=channels)

        # TRG channel is 0.0 for the first ten records, 1.0 afterwards.
        for row, values in enumerate(mock_data(record_total, len(channels))):
            values[trg_index] = 0.0 if row < 10 else 1.0
            buf.append(Record(values, float(row), None))

        rows = buf.query_data(filters=[("TRG", ">", 0)],
                              ordering=("timestamp", "asc"),
                              max_results=1)

        self.assertEqual(len(rows), 1, "Should have limited to max_results.")
        self.assertEqual(rows[0].data[trg_index], 1.0,
                         "Should get filtered data.")
        self.assertEqual(rows[0].timestamp, 10.0,
                         "Should get first instance")
        buf.cleanup()
Пример #3
0
    def test_count(self):
        """Appending records should be reflected in the server's count."""
        total = 500
        for timestamp in range(total):
            record = Record(data=self._new_data(),
                            timestamp=timestamp,
                            rownum=None)
            buffer_server.append(self.pid, record)

        self.assertEqual(buffer_server.count(self.pid), total)
Пример #4
0
    def test_count(self):
        """Test that the count of records is correct."""
        total = 500
        for timestamp, values in enumerate(mock_data(total,
                                                     self.channel_count)):
            buffer_server.append(
                self.pid,
                Record(data=values, timestamp=timestamp, rownum=None))

        self.assertEqual(buffer_server.count(self.pid), total)
Пример #5
0
def _convert_row(row):
    """Convert from database row to Record.

    Parameters
    ----------
        row : tuple
            (rownum, timestamp, ch1, ch2, ...) as stored in the database.
    """
    rownum, timestamp, *channel_values = row
    return Record(data=channel_values, timestamp=timestamp, rownum=rownum)
Пример #6
0
    def test_get_all_data(self):
        """get_data with no bounds should return every appended record."""
        total = 150
        data = [self._new_data() for _ in range(total)]
        for timestamp, values in enumerate(data):
            buffer_server.append(
                self.pid,
                Record(data=values, timestamp=timestamp, rownum=None))

        result = buffer_server.get_data(self.pid)
        self.assertEqual([r.data for r in result], data, "Should return all \
            data")
Пример #7
0
    def test_multiple_servers(self):
        """Test multiple concurrent servers."""
        pid2 = buffer_server.start(self.channels, self._next_buf_name())

        n_records = 200
        for row, values in enumerate(mock_data(n_records,
                                               self.channel_count)):
            # Alternate writes between the two servers.
            target = self.pid if row % 2 == 0 else pid2
            buffer_server.append(target, Record(values, row, None))

        self.assertEqual(buffer_server.count(self.pid), n_records / 2)
        self.assertEqual(buffer_server.count(pid2), n_records / 2)

        first = buffer_server.get_data(self.pid, 0, 5)
        second = buffer_server.get_data(pid2, 0, 5)

        self.assertNotEqual(first, second)
        buffer_server.stop(pid2)
Пример #8
0
    def test_get_all_data(self):
        """Test method to get all data from buffer."""

        data = list(mock_data(n_records=150, n_cols=self.channel_count))
        for timestamp, values in enumerate(data):
            record = Record(data=values, timestamp=timestamp, rownum=None)
            buffer_server.append(self.pid, record)

        result = buffer_server.get_data(self.pid)
        self.assertEqual([r.data for r in result], data, "Should return all \
            data")
Пример #9
0
    def test_len(self):
        """len() of a buffer should equal the number of appended records."""
        record_total = 1000
        n_channels = 25
        buf = Buffer(channels=["ch" + str(ch) for ch in range(n_channels)])

        for row, values in enumerate(_mockdata(record_total, n_channels)):
            buf.append(Record(values, float(row), None))

        self.assertEqual(len(buf), record_total)
        buf.cleanup()
Пример #10
0
    def test_len(self):
        """Test buffer len."""
        record_total = 1000
        n_channels = 25
        buf = Buffer(channels=["ch" + str(ch) for ch in range(n_channels)])

        for row, values in enumerate(mock_data(record_total, n_channels)):
            buf.append(Record(values, float(row), None))

        self.assertEqual(len(buf), record_total)
        buf.cleanup()
Пример #11
0
    def test_multiple_servers(self):
        """Two servers should accept interleaved writes independently."""
        pid2 = buffer_server.start(self.channels, self._next_buf_name())

        total = 200
        for row in range(total):
            values = [
                np.random.uniform(-1000, 1000)
                for _ in range(self.channel_count)
            ]
            # Even rows go to the first server, odd rows to the second.
            target = self.pid if row % 2 == 0 else pid2
            buffer_server.append(target, Record(values, row, None))

        self.assertEqual(buffer_server.count(self.pid), total / 2)
        self.assertEqual(buffer_server.count(pid2), total / 2)

        first = buffer_server.get_data(self.pid, 0, 5)
        second = buffer_server.get_data(pid2, 0, 5)

        self.assertNotEqual(first, second)
        buffer_server.stop(pid2)
Пример #12
0
    def test_query_with_invalid_filter_field(self):
        """Filtering on a channel that is not in the buffer should raise."""
        channels = ["ch1", "ch2", "TRG"]
        buf = Buffer(channels=channels)

        for row, values in enumerate(_mockdata(20, len(channels))):
            buf.append(Record(values, float(row), None))

        # "ch3" is not a known channel.
        with pytest.raises(Exception):
            buf.query_data(filters=[("ch3", ">", 0)])
        buf.cleanup()
Пример #13
0
    def test_get_data_slice(self):
        """A bounded timestamp query should return exactly that slice."""
        total = 150
        data = [self._new_data() for _ in range(total)]
        for timestamp, values in enumerate(data):
            buffer_server.append(
                self.pid,
                Record(data=values, timestamp=timestamp, rownum=None))

        start, end = 10, 20

        result = buffer_server.get_data(self.pid, start, end,
                                        field='timestamp')
        self.assertEqual([r.data for r in result], data[start:end], "Should \
            return the slice of data requested.")
Пример #14
0
    def test_query_with_invalid_filter_field(self):
        """Test query with invalid filter field."""
        channels = ["ch1", "ch2", "TRG"]
        buf = Buffer(channels=channels)

        for row, values in enumerate(mock_data(20, len(channels))):
            buf.append(Record(values, float(row), None))

        # "ch3" is not one of the configured channels.
        with pytest.raises(Exception):
            buf.query_data(filters=[("ch3", ">", 0)])
        buf.cleanup()
Пример #15
0
def _main():
    """Command-line benchmark: time bulk inserts into a Buffer.

    Flags:
        -n/--n_records      number of records to insert (default 100000)
        -s/--chunk_size     Buffer chunksize (default 10000)
        -c/--channel_count  channels per record (default 25)
    """
    import argparse
    import timeit
    import numpy as np

    parser = argparse.ArgumentParser()
    parser.add_argument('-n', '--n_records', default=100000, type=int,
                        help='number of records to insert; default is 100000')
    parser.add_argument('-s', '--chunk_size', default=10000, type=int,
                        help="default is 10000")
    parser.add_argument('-c', '--channel_count', default=25, type=int,
                        help="default is 25")

    args = parser.parse_args()
    n = args.n_records
    chunksize = args.chunk_size
    channel_count = args.channel_count

    channels = ["ch" + str(c) for c in range(channel_count)]

    print("Running with %d samples of %d channels each and chunksize %d" %
          (n, channel_count, chunksize))
    b = Buffer(channels=channels, chunksize=chunksize)

    def data(n, c):
        """Generate n mock records of c random channel values each."""
        for _ in range(n):
            yield [np.random.uniform(-1000, 1000) for _ in range(c)]

    starttime = timeit.default_timer()
    for d in data(n, channel_count):
        # Timestamp each record at its insert time.
        b.append(Record(d, timeit.default_timer(), None))

    endtime = timeit.default_timer()
    totaltime = endtime - starttime

    print("Total records inserted: " + str(len(b)))
    print("Total time: " + str(totaltime))
    print("Records per second: " + str(n / totaltime))

    print("First 5 records")
    # NOTE(review): end=6 queries rows [0, 6) by rownum — confirm whether
    # 5 or 6 records are actually intended here.
    print(b.query(start=0, end=6))

    b.cleanup()
Пример #16
0
    def test_query_data(self):
        """A filtered, ordered, limited query should return the earliest
        record whose last channel is positive."""
        total = 150
        data = [self._new_data() for _ in range(total)]
        last_channel = self.channels[-1]

        # Last channel flips from 0.0 to 1.0 at record 100.
        for timestamp, values in enumerate(data):
            values[-1] = 0.0 if timestamp < 100 else 1.0
            buffer_server.append(
                self.pid,
                Record(data=values, timestamp=timestamp, rownum=None))

        result = buffer_server.query(self.pid,
                                     filters=[(last_channel, ">", 0)],
                                     ordering=("timestamp", "asc"),
                                     max_results=1)
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0].data[-1], 1.0)
        self.assertEqual(result[0].timestamp, 100.0)
Пример #17
0
    def test_get_data_slice(self):
        """Test querying for a slice of data."""

        data = list(mock_data(n_records=150, n_cols=self.channel_count))
        for timestamp, values in enumerate(data):
            record = Record(data=values, timestamp=timestamp, rownum=None)
            buffer_server.append(self.pid, record)

        start, end = 10, 20

        result = buffer_server.get_data(self.pid, start, end,
                                        field='timestamp')
        self.assertEqual([r.data for r in result], data[start:end], "Should \
            return the slice of data requested.")
Пример #18
0
    def test_query_with_invalid_order_direction(self):
        """An ordering direction other than asc/desc should raise."""
        channels = ["ch1", "ch2", "TRG"]
        trg_index = -1

        buf = Buffer(channels=channels)

        for row, values in enumerate(_mockdata(20, len(channels))):
            values[trg_index] = 0.0 if row < 10 else 1.0
            buf.append(Record(values, float(row), None))

        # "ascending" is not a valid direction token.
        with pytest.raises(Exception):
            buf.query_data(ordering=("ch1", "ascending"))
        buf.cleanup()
Пример #19
0
def _main():
    """Command-line benchmark: time bulk inserts into a Buffer.

    Flags:
        -n/--n_records      number of records to insert (default 100000)
        -s/--chunk_size     Buffer chunksize (default 10000)
        -c/--channel_count  channels per record (default 25)
    """
    import argparse
    import timeit
    from bcipy.acquisition.util import mock_data

    parser = argparse.ArgumentParser()
    parser.add_argument('-n',
                        '--n_records',
                        default=100000,
                        type=int,
                        help='number of records to insert; default is 100000')
    parser.add_argument('-s',
                        '--chunk_size',
                        default=10000,
                        type=int,
                        help="default is 10000")
    parser.add_argument('-c',
                        '--channel_count',
                        default=25,
                        type=int,
                        help="default is 25")

    args = parser.parse_args()
    channels = ["ch" + str(c) for c in range(args.channel_count)]

    # BUG FIX: the f-strings were wrapped in an extra pair of parentheses
    # separated by a comma, so print() received a tuple and emitted its
    # repr (quotes, comma, parens). Print a single joined message instead.
    print(f"Running with {args.n_records} samples of {args.channel_count} "
          f"channels each and chunksize {args.chunk_size}")
    buf = Buffer(channels=channels, chunksize=args.chunk_size)

    starttime = timeit.default_timer()
    for record_data in mock_data(args.n_records, args.channel_count):
        # Timestamp each record at its insert time.
        buf.append(Record(record_data, timeit.default_timer(), None))

    endtime = timeit.default_timer()
    totaltime = endtime - starttime

    print("Total records inserted: " + str(len(buf)))
    print("Total time: " + str(totaltime))
    print("Records per second: " + str(args.n_records / totaltime))

    print("First 5 records")
    # NOTE(review): end=6 queries rows [0, 6) by rownum — confirm whether
    # 5 or 6 records are actually intended here.
    print(buf.query(start=0, end=6))

    buf.cleanup()
Пример #20
0
    def test_query_with_invalid_order_direction(self):
        """Test query with invalid order direction"""
        channels = ["ch1", "ch2", "TRG"]
        trg_index = -1

        buf = Buffer(channels=channels)

        for row, values in enumerate(mock_data(20, len(channels))):
            values[trg_index] = 0.0 if row < 10 else 1.0
            buf.append(Record(values, float(row), None))

        # Only "asc"/"desc" are valid direction tokens.
        with pytest.raises(Exception):
            buf.query_data(ordering=("ch1", "ascending"))
        buf.cleanup()
Пример #21
0
    def run(self):
        """Process startup: connect to the device, publish device info,
        wait for the db server, then stream samples into the buffer until
        stopped or the device stops producing data. Setup errors are
        reported through msg_queue before being re-raised.
        """

        try:
            log.debug("Connecting to device")
            self._connector.connect()
            self._connector.acquisition_init()
        except Exception as exc:
            self.msg_queue.put((MSG_ERROR, str(exc)))
            raise exc

        # Publish device info; the main thread treats this message as the
        # signal that initialization is complete.
        self.msg_queue.put((MSG_DEVICE_INFO, self._connector.device_info))

        # Block until the db server has started, then drop the queue.
        self.msg_queue.get()
        self.msg_queue = None

        log.debug("Starting Acquisition read data loop")
        samples_read = 0
        device_data = self._connector.read_data()

        # Acquire continuously while running and the device yields data.
        while self.running() and device_data:
            samples_read += 1
            if DEBUG and samples_read % DEBUG_FREQ == 0:
                log.debug("Read sample: %s", str(samples_read))

            buffer_server.append(
                self._buf,
                Record(device_data, self._clock.getTime(), samples_read))
            try:
                device_data = self._connector.read_data()
            # pylint: disable=broad-except
            except Exception as exc:
                log.error("Error reading data from device: %s", str(exc))
                device_data = None
                break
        log.debug("Total samples read: %s", str(samples_read))
        self._connector.disconnect()
Пример #22
0
    def test_query_data(self):
        """Test query_data method"""

        data = list(mock_data(n_records=150, n_cols=self.channel_count))
        last_channel = self.channels[-1]

        # Last channel flips from 0.0 to 1.0 at record 100.
        for timestamp, values in enumerate(data):
            values[-1] = 0.0 if timestamp < 100 else 1.0
            record = Record(data=values, timestamp=timestamp, rownum=None)
            buffer_server.append(self.pid, record)

        result = buffer_server.query(self.pid,
                                     filters=[(last_channel, ">", 0)],
                                     ordering=("timestamp", "asc"),
                                     max_results=1)
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0].data[-1], 1.0)
        self.assertEqual(result[0].timestamp, 100.0)
Пример #23
0
    def test_query_before_flush(self):
        """If a query is made before chunksize records have been written,
        the data should still be available."""
        record_total = 1000
        n_channels = 25

        # chunksize larger than the record count, so nothing is flushed.
        buf = Buffer(channels=["ch" + str(ch) for ch in range(n_channels)],
                     chunksize=10000)

        for row, values in enumerate(_mockdata(record_total, n_channels)):
            buf.append(Record(values, float(row), None))

        rows = buf.query(start=buf.start_time, field='timestamp')
        self.assertEqual(len(rows), record_total)
        self.assertEqual(len(buf.all()), record_total)

        buf.cleanup()
Пример #24
0
    def test_latest(self):
        """latest(n) should return the newest n records, newest first."""
        record_total = 1000
        latest_n = 100
        n_channels = 25

        buf = Buffer(channels=["ch" + str(ch) for ch in range(n_channels)])

        # Keep a copy of the last latest_n records (with 1-based rownums).
        expected = []
        for row, values in enumerate(_mockdata(record_total, n_channels)):
            timestamp = float(row)
            if row >= record_total - latest_n:
                expected.append((values, timestamp, row + 1))
            buf.append(Record(values, timestamp, None))

        rows = buf.latest(latest_n)
        # latest() returns newest first, so compare against reversed order.
        for position, item in enumerate(reversed(expected)):
            self.assertEqual(item, rows[position])
        buf.cleanup()
Пример #25
0
    def test_buffer(self):
        """Test Buffer functionality: timed appends, bounded timestamp
        queries (end-exclusive), and open-ended queries from start_time."""

        n_records = 15000
        channel_count = 25
        channels = ["ch" + str(c) for c in range(channel_count)]

        buf = Buffer(channels=channels, chunksize=10000)

        append_timer = _Timer()
        # Remember the data written at every 1000th timestamp for later
        # spot-checks against query results.
        timevalues = {}
        for i, data in enumerate(mock_data(n_records, channel_count)):
            timestamp = float(i)
            if i % 1000 == 0:
                timevalues[timestamp] = data
            with append_timer:
                buf.append(Record(data, timestamp, None))

        self.assertEqual(buf.start_time, 0.0)
        starttime = 0.0
        rows = buf.query(start=starttime, end=starttime +
                         1.0, field='timestamp')

        self.assertEqual(len(rows), 1, "Results should not include end value.")

        self.assertEqual(rows[0].timestamp, starttime)
        self.assertEqual(rows[0].data, timevalues[starttime])

        start_time = 1000.0
        end_time = 2000.0
        rows = buf.query(start=start_time, end=end_time, field='timestamp')
        # BUG FIX: the original compared the row count against start_time,
        # which was only numerically coincidental (1000 rows, start at
        # t=1000.0). Assert the intended quantity: one record per unit
        # timestamp in the half-open interval [start_time, end_time).
        self.assertEqual(len(rows), int(end_time - start_time))
        self.assertEqual(rows[0].data, timevalues[start_time])

        rows = buf.query(start=buf.start_time, field='timestamp')
        self.assertEqual(
            len(rows), n_records, ("Providing only the start should return "
                                   "the rest."))
        buf.cleanup()
Пример #26
0
    def run(self):
        """Continuously reads data from the source and sends it to the buffer
        for processing.
        """

        try:
            logging.debug("Connecting to device")
            self._device.connect()
            self._device.acquisition_init()
        except Exception as exc:
            self._msg_queue.put((MSG_ERROR, str(exc)))
            raise exc

        # Publish device info; the main thread treats this message as the
        # signal that initialization is complete.
        self._msg_queue.put((MSG_DEVICE_INFO, self._device.device_info))

        logging.debug("Starting Acquisition read data loop")
        samples_read = 0
        device_data = self._device.read_data()

        # Acquire continuously while running and the device yields data.
        while self.running() and device_data:
            samples_read += 1
            if DEBUG and samples_read % DEBUG_FREQ == 0:
                logging.debug("Read sample: " + str(samples_read))

            self._data_queue.put(
                Record(device_data, self._clock.getTime(), samples_read))
            try:
                device_data = self._device.read_data()
            except Exception as exc:
                logging.debug("Error reading data from device: " + str(exc))
                device_data = None
                break
        logging.debug("Total samples read: " + str(samples_read))
        self._device.disconnect()