Example 1
    def _hw_ai_callback(self):
        # TODO: Get lock?
        for name, b in self._buffers['hw_ai'].items():
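            # Starting sample index for this read (running count of samples delivered so far)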
            t0_sample = b._total_samples_read

            samples = b.read()[0] / b._sf
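            # If configured, drop the leading samples that fall within the AI filter delay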
            if b._discarded < b._discard:
                to_discard = min(samples.shape[-1], b._discard - b._discarded)
                b._discarded += to_discard
                t0_sample -= to_discard

                log.info(
                    'Discarding %.0f samples to compensate for AI filter delay',
                    to_discard)
                samples = samples[to_discard:]

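            # Package the remaining samples with their start sample and sampling rate,
            # then dispatch to the callbacks registered for this channel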
            if len(samples):
                b._total_samples_read += len(samples)
                data = InputData(samples,
                                 metadata={
                                     't0_sample': t0_sample,
                                     'fs': b.fs
                                 })
                for channel_name, cb in self._callbacks.get('ai', []):
                    if channel_name == name:
                        cb(data)
Example 2
def test_input_data():
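    # Concatenating three InputData chunks should reproduce the original array and
    # carry the shared metadata through (assumes numpy as np, plus InputData and
    # concatenate from the module under test, imported at the top of the file).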
    expected_array = np.concatenate((
        np.zeros(shape=5),
        np.ones(shape=5),
        np.random.uniform(size=5),
    ))
    expected_metadata = {'t0': 0}
    expected_input = InputData(expected_array, expected_metadata)

    input_data = [
        InputData(expected_array[0:5], metadata=expected_metadata),
        InputData(expected_array[5:10], metadata=expected_metadata),
        InputData(expected_array[10:15], metadata=expected_metadata),
    ]

    result = concatenate(input_data)
    assert expected_input.metadata == result.metadata
    assert np.array_equal(expected_input, result)
Example 3
def test_pipeline(data, pipeline):
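    # Eight 5-sample chunks sent through the pipeline should be regrouped into a
    # single (4, 10) block, with the block size recorded in the output metadata.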
    expected = []
    for i in range(8):
        d = InputData(np.random.uniform(size=5), {'n': 5})
        pipeline.send(d)
        expected.append(d[np.newaxis])
    expected = concatenate(expected, axis=-1).reshape((-1, 10))

    assert len(data) == 1
    assert data[0].shape == (4, 10)
    assert np.array_equal(expected, data[0])
    assert data[0].metadata == {'n': 5, 'block_size': 10}
Example 4
def test_slice():
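    # Slicing InputData should offset t0_sample by the start index and divide fs
    # by the step.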
    r = np.random.uniform(size=100)
    d = InputData(r, {'t0_sample': 10, 'fs': 100})
    assert d[::2].metadata == {'t0_sample': 10, 'fs': 50}
    assert d[::4].metadata == {'t0_sample': 10, 'fs': 25}
    assert d[:4:].metadata == {'t0_sample': 10, 'fs': 100}
    assert d[1::2].metadata == {'t0_sample': 11, 'fs': 50}
    assert d[10::2].metadata == {'t0_sample': 20, 'fs': 50}
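    # Negative start indices are resolved against the array length before
    # offsetting t0_sample.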
    assert d[10] == r[10]
    assert np.all(d[:10] == r[:10])
    assert d[-10::2].metadata == {'t0_sample': 100, 'fs': 50}
    assert d[-20::2].metadata == {'t0_sample': 90, 'fs': 50}
    assert d[-20::].metadata == {'t0_sample': 90, 'fs': 100}
    assert d[-20::-2].metadata == {'t0_sample': 90, 'fs': -50}
Example 5
def test_pipeline_flush(data, pipeline):
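    # Sending Ellipsis flushes the pipeline, so only chunks sent after the flush
    # should contribute to the output block.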
    expected = []
    for i in range(16):
        d = InputData(np.random.uniform(size=5), {'n': 5})
        pipeline.send(d)
        expected.append(d[np.newaxis])
        if i == 6:
            pipeline.send(Ellipsis)
            expected = []

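    # Nine chunks were sent after the flush; the first eight form one (4, 10) block.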
    expected = concatenate(expected[:8], axis=-1).reshape((-1, 10))
    assert len(data) == 1
    assert data[0].shape == (4, 10)
    assert np.array_equal(expected, data[0])
    assert data[0].metadata == expected.metadata