Example #1
    def test_query_data(self):
        """Test querying for data."""
        n_records = 20

        channels = ["ch1", "ch2", "TRG"]
        trg_index = -1
        channel_count = len(channels)

        buf = Buffer(channels=channels)

        for i, data in enumerate(mock_data(n_records, channel_count)):
            data[trg_index] = 1.0 if i >= 10 else 0.0
            timestamp = float(i)
            buf.append(Record(data, timestamp, None))

        rows = buf.query_data(filters=[("TRG", ">", 0)],
                              ordering=("timestamp", "asc"),
                              max_results=1)

        self.assertEqual(len(rows), 1, "Should have limited to max_results.")
        self.assertEqual(rows[0].data[trg_index], 1.0,
                         "Should get filtered data.")
        self.assertEqual(rows[0].timestamp, 10.0,
                         "Should get first instance")
        buf.cleanup()
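These tests exercise the Buffer and Record API from bcipy.acquisition together with a mock_data helper. For readers without the library at hand, a minimal sketch consistent with how the helpers are called in these examples might look like the following; the namedtuple fields and the use of random floats are assumptions inferred from the call sites, not the library's actual implementation.

# Illustrative sketch (assumptions): Record as a namedtuple with the three
# fields referenced in these tests, and mock_data as a generator that yields
# one list of n_cols random floats per record.
from collections import namedtuple
import random

Record = namedtuple('Record', ['data', 'timestamp', 'rownum'])


def mock_data(n_records, n_cols):
    """Yield n_records rows, each a list of n_cols random floats."""
    for _ in range(n_records):
        yield [random.uniform(-1000.0, 1000.0) for _ in range(n_cols)]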
Example #2
    def test_count(self):
        """Test that the count of records is correct."""
        n_records = 500
        for i, data in enumerate(mock_data(n_records, self.channel_count)):
            buffer_server.append(self.pid,
                                 Record(data=data, timestamp=i, rownum=None))

        self.assertEqual(buffer_server.count(self.pid), n_records)
Example #3
    def __init__(self, *args, **kwargs):
        super(TestDataAcquisitionClient, self).__init__(*args, **kwargs)
        num_channels = 25
        num_records = 500
        self.device_spec = DeviceSpec(
            name="Mock_device",
            channels=['ch' + str(i) for i in range(num_channels)],
            sample_rate=300.0)
        self.mock_data = list(mock_data(num_records, num_channels))
Example #4
    def test_get_all_data(self):
        """Test method to get all data from buffer."""

        data = list(mock_data(n_records=150, n_cols=self.channel_count))
        for record_index, record in enumerate(data):
            buffer_server.append(
                self.pid,
                Record(data=record, timestamp=record_index, rownum=None))

        result = buffer_server.get_data(self.pid)
        self.assertEqual([r.data for r in result], data,
                         "Should return all data")
Example #5
    def test_len(self):
        """Test buffer len."""
        n_records = 1000
        channel_count = 25
        channels = ["ch" + str(c) for c in range(channel_count)]

        buf = Buffer(channels=channels)

        for i, data in enumerate(mock_data(n_records, channel_count)):
            buf.append(Record(data, float(i), None))

        self.assertEqual(len(buf), n_records)
        buf.cleanup()
Example #6
    def test_file_generator(self):
        """Should stream data from a file."""
        row_count = 100
        header = ['col1,col2,col3']
        column_count = len(header[0].split(','))
        data = list(mock_data(row_count, column_count))
        rows = [','.join(map(str, row)) for row in data]
        test_data = '\n'.join(header + rows)

        with patch('bcipy.acquisition.datastream.generator.open',
                   mock_open(read_data=test_data), create=True):

            gen = file_data(filename='foo', header_row=1)
            generated_data = [next(gen) for _ in range(row_count)]

            for i, row in enumerate(generated_data):
                self.assertEqual(row, data[i])
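Examples #6 and #11 both drive file_data from bcipy.acquisition.datastream.generator against a mocked file. As a rough mental model only, a generator that would satisfy these tests, assuming it skips header_row line(s) and then yields each remaining CSV line parsed into a list of floats, could be sketched as follows (an illustration, not the library's actual code):

# Illustrative sketch (assumption): skip the header, then yield one list of
# floats per remaining CSV line until the file is exhausted.
def file_data(filename, header_row=1):
    with open(filename) as infile:
        for _ in range(header_row):
            next(infile)  # skip the header line(s)
        for line in infile:
            yield [float(value) for value in line.strip().split(',')]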
Example #7
    def test_query_with_invalid_filter_field(self):
        """Test query with invalid filter field."""
        n_records = 20

        channels = ["ch1", "ch2", "TRG"]
        channel_count = len(channels)

        buf = Buffer(channels=channels)

        for i, data in enumerate(mock_data(n_records, channel_count)):
            timestamp = float(i)
            buf.append(Record(data, timestamp, None))

        with pytest.raises(Exception):
            buf.query_data(filters=[("ch3", ">", 0)])
        buf.cleanup()
Example #8
    def test_get_data_slice(self):
        """Test querying for a slice of data."""

        data = list(mock_data(n_records=150, n_cols=self.channel_count))
        for i, record in enumerate(data):
            buffer_server.append(self.pid,
                                 Record(data=record, timestamp=i, rownum=None))

        start = 10
        end = 20

        result = buffer_server.get_data(self.pid,
                                        start,
                                        end,
                                        field='timestamp')
        self.assertEqual([r.data for r in result], data[start:end],
                         "Should return the slice of data requested.")
Example #9
def _main():
    import argparse
    import timeit
    from bcipy.acquisition.util import mock_data

    parser = argparse.ArgumentParser()
    parser.add_argument('-n',
                        '--n_records',
                        default=100000,
                        type=int,
                        help='number of records to insert; default is 100000')
    parser.add_argument('-s',
                        '--chunk_size',
                        default=10000,
                        type=int,
                        help="default is 10000")
    parser.add_argument('-c',
                        '--channel_count',
                        default=25,
                        type=int,
                        help="default is 25")

    args = parser.parse_args()
    channels = ["ch" + str(c) for c in range(args.channel_count)]

    print((f"Running with {args.n_records} samples of {args.channel_count} ",
           f"channels each and chunksize {args.chunk_size}"))
    buf = Buffer(channels=channels, chunksize=args.chunk_size)

    starttime = timeit.default_timer()
    for record_data in mock_data(args.n_records, args.channel_count):
        timestamp = timeit.default_timer()
        buf.append(Record(record_data, timestamp, None))

    endtime = timeit.default_timer()
    totaltime = endtime - starttime

    print("Total records inserted: " + str(len(buf)))
    print("Total time: " + str(totaltime))
    print("Records per second: " + str(args.n_records / totaltime))

    print("First 5 records")
    print(buf.query(start=0, end=6))

    buf.cleanup()
Example #10
    def test_query_with_invalid_order_direction(self):
        """Test query with invalid order direction"""
        n_records = 20

        channels = ["ch1", "ch2", "TRG"]
        trg_index = -1
        channel_count = len(channels)

        buf = Buffer(channels=channels)

        for i, data in enumerate(mock_data(n_records, channel_count)):
            data[trg_index] = 1.0 if i >= 10 else 0.0
            timestamp = float(i)
            buf.append(Record(data, timestamp, None))

        with pytest.raises(Exception):
            buf.query_data(ordering=("ch1", "ascending"))
        buf.cleanup()
Example #11
    def test_file_generator_end(self):
        """Should throw an exception when all data has been consumed"""
        row_count = 10

        header = ['col1,col2,col3']
        column_count = len(header[0].split(','))
        data = list(mock_data(row_count, column_count))
        rows = [','.join(map(str, row)) for row in data]
        test_data = '\n'.join(header + rows)

        with patch('bcipy.acquisition.datastream.generator.open',
                   mock_open(read_data=test_data), create=True):
            gen = file_data(filename='foo', header_row=1)
            # exhaust the generator
            for _ in range(row_count):
                next(gen)

            with pytest.raises(StopIteration):
                next(gen)
Example #12
    def test_query_data(self):
        """Test query_data method"""

        data = list(mock_data(n_records=150, n_cols=self.channel_count))
        last_channel = self.channels[-1]

        for record_index, record in enumerate(data):
            record[-1] = 1.0 if record_index >= 100 else 0.0
            buffer_server.append(
                self.pid,
                Record(data=record, timestamp=record_index, rownum=None))

        result = buffer_server.query(self.pid,
                                     filters=[(last_channel, ">", 0)],
                                     ordering=("timestamp", "asc"),
                                     max_results=1)
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0].data[-1], 1.0)
        self.assertEqual(result[0].timestamp, 100.0)
Example #13
    def test_multiple_servers(self):
        """Test multiple concurrent servers."""
        pid2 = buffer_server.start(self.channels, self._next_buf_name())

        n_records = 200
        for count, data in enumerate(mock_data(n_records, self.channel_count)):
            if count % 2 == 0:
                buffer_server.append(self.pid, Record(data, count, None))
            else:
                buffer_server.append(pid2, Record(data, count, None))

        self.assertEqual(buffer_server.count(self.pid), n_records // 2)
        self.assertEqual(buffer_server.count(pid2), n_records // 2)

        server1_data = buffer_server.get_data(self.pid, 0, 5)
        server2_data = buffer_server.get_data(pid2, 0, 5)

        self.assertNotEqual(server1_data, server2_data)
        buffer_server.stop(pid2)
Example #14
    def test_query_before_flush(self):
        """If a query is made before chunksize records have been written,
        the data should still be available."""

        n_records = 1000
        channel_count = 25
        channels = ["ch" + str(c) for c in range(channel_count)]

        buf = Buffer(channels=channels, chunksize=10000)

        for i, data in enumerate(mock_data(n_records, channel_count)):
            timestamp = float(i)
            buf.append(Record(data, timestamp, None))

        rows = buf.query(start=buf.start_time, field='timestamp')
        self.assertEqual(len(rows), n_records)
        self.assertEqual(len(buf.all()), n_records)

        buf.cleanup()
Example #15
    def test_latest(self):
        """Test query for most recent items."""
        n_records = 1000
        latest_n = 100
        channel_count = 25
        channels = ["ch" + str(c) for c in range(channel_count)]

        buf = Buffer(channels=channels)

        latest = []
        for i, data in enumerate(mock_data(n_records, channel_count)):
            timestamp = float(i)
            if i >= n_records - latest_n:
                latest.append((data, timestamp, i+1))
            buf.append(Record(data, timestamp, None))

        rows = buf.latest(latest_n)
        for j, item in enumerate(reversed(latest)):
            self.assertEqual(item, rows[j])
        buf.cleanup()
Example #16
    def test_buffer(self):
        """Test Buffer functionality."""

        n_records = 15000
        channel_count = 25
        channels = ["ch" + str(c) for c in range(channel_count)]

        buf = Buffer(channels=channels, chunksize=10000)

        append_timer = _Timer()
        timevalues = {}
        for i, data in enumerate(mock_data(n_records, channel_count)):
            timestamp = float(i)
            if i % 1000 == 0:
                timevalues[timestamp] = data
            with append_timer:
                buf.append(Record(data, timestamp, None))

        self.assertEqual(buf.start_time, 0.0)
        starttime = 0.0
        rows = buf.query(start=starttime, end=starttime + 1.0,
                         field='timestamp')

        self.assertEqual(len(rows), 1, "Results should not include end value.")

        self.assertEqual(rows[0].timestamp, starttime)
        self.assertEqual(rows[0].data, timevalues[starttime])

        start_time = 1000.0
        end_time = 2000.0
        rows = buf.query(start=start_time, end=end_time, field='timestamp')
        self.assertEqual(len(rows), int(end_time - start_time))
        self.assertEqual(rows[0].data, timevalues[start_time])

        rows = buf.query(start=buf.start_time, field='timestamp')
        self.assertEqual(
            len(rows), n_records, ("Providing only the start should return "
                                   "the rest."))
        buf.cleanup()
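Example #16 wraps each append in a _Timer context manager whose definition is not shown here. A minimal sketch that matches its usage, assuming it simply accumulates elapsed wall-clock time across with-blocks (the actual helper may record more detail), would be:

# Illustrative sketch (assumption): accumulate elapsed time over each
# `with` block using timeit's default timer.
import timeit


class _Timer:
    def __init__(self):
        self.total = 0.0

    def __enter__(self):
        self._start = timeit.default_timer()
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.total += timeit.default_timer() - self._start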
Example #17
    def __init__(self, *args, **kwargs):
        super(TestDataAcquisitionClient, self).__init__(*args, **kwargs)
        num_channels = 25
        num_records = 500
        self.mock_channels = ['ch' + str(i) for i in range(num_channels)]
        self.mock_data = list(mock_data(num_records, num_channels))