Example #1
def test_interpolate_on_data_no_jitter():
    data = dummy_data_no_jitter
    data.reset()
    interpolate = Interpolate(rate=rate, method='linear')
    looper = Looper(data, interpolate)
    dejittered_data, _ = looper.run(chunk_size=8)
    pd.testing.assert_frame_equal(dejittered_data, data._data.iloc[:-1])
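All of these tests drive a node through a `Looper` helper that mimics the scheduler: it pulls chunks from a data generator, feeds them to the node's input port, calls `update()`, and concatenates the outputs. A minimal stand-in is sketched below; the `next()` / `_data` / `i.data` / `o.data` interface is assumed from how the tests use it, not taken from the library's source.

import pandas as pd

class MiniLooper:
    """Minimal sketch of a chunk-by-chunk test driver (assumed interface)."""

    def __init__(self, generator, node):
        self._generator = generator
        self._node = node

    def run(self, chunk_size=None):
        output_chunks, output_metas = [], []
        if chunk_size is None:
            # one pass over the whole data
            chunks = [self._generator._data]
        else:
            chunks = []
            while True:
                chunk = self._generator.next(chunk_size)
                if chunk.empty:
                    break
                chunks.append(chunk)
        for chunk in chunks:
            self._node.i.data = chunk   # feed the input port
            self._node.update()         # run the node once on this chunk
            if self._node.o.data is not None:
                output_chunks.append(self._node.o.data)
                output_metas.append(self._node.o.meta)
        return pd.concat(output_chunks), output_metas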
Example #2
def test_rollingdetect(ppg_generator):
    node = RollingDetect(window=0.8, tol=0.5)

    # reset generator
    ppg_generator.reset()
    # loop across chunks
    looper = Looper(ppg_generator, node)
    cascade_output, _ = looper.run(chunk_size=5)

    expected_index = pd.DatetimeIndex(
        ['2018-11-19 11:06:41.529004261', '2018-11-19 11:06:41.685242884'],
        dtype='datetime64[ns]',
        freq=None)

    expected_labels = ['peak', 'valley']
    expected_data_peak = {
        'value': 0.7587511539459229,
        'lag': 0.0,
        'interval': 2.562340529,
        'column_name': 'PPG'
    }
    expected_data_valley = {
        'value': -0.9906618595123292,
        'lag': 2.718579152,
        'interval': 2.718579152,
        'column_name': 'PPG'
    }
    pd.testing.assert_index_equal(expected_index, cascade_output.index)
    np.testing.assert_array_equal(expected_labels, cascade_output.label.values)
    assert_dict_almost_equal(expected_data_peak,
                             cascade_output.data.values[0],
                             keys=['value', 'lag', 'interval', 'column_name'])
    assert_dict_almost_equal(expected_data_valley,
                             cascade_output.data.values[1],
                             keys=['value', 'lag', 'interval', 'column_name'])
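`assert_dict_almost_equal` is a test utility from the surrounding module, not a numpy or pandas function. A plausible implementation; the name, signature and semantics are inferred from the call sites, and the body is an assumption:

import numbers

import numpy as np

def assert_dict_almost_equal(expected, actual, keys, decimal=6):
    """Compare the selected keys of two dicts, numeric values approximately."""
    for key in keys:
        if isinstance(expected[key], (numbers.Number, np.ndarray)):
            np.testing.assert_almost_equal(expected[key], actual[key],
                                           decimal=decimal)
        else:
            assert expected[key] == actual[key]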
Example #3
def test_round_on_data_no_jitter():
    data = dummy_data_no_jitter
    data.reset()
    node = Snap(rate=rate)
    looper = Looper(data, node)
    dejittered_data, _ = looper.run(chunk_size=8)
    pd.testing.assert_frame_equal(dejittered_data, data._data)
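The `dummy_data_no_jitter` fixture and the module-level `rate` come from the test module and are not shown here. A hedged sketch of a compatible generator, exposing the `reset()` / `next()` / `_data` interface these dejitter tests rely on (illustrative only, not the library's helper):

import numpy as np
import pandas as pd

rate = 10  # assumed sampling rate; the real tests define it elsewhere

class DummyStream:
    """Illustrative data generator with a regular, jitter-free index."""

    def __init__(self, rows=100, cols=3, rate=10, start='2018-01-01', seed=42):
        rng = np.random.default_rng(seed)
        index = pd.date_range(start=start, periods=rows,
                              freq=pd.Timedelta(seconds=1 / rate))
        self._data = pd.DataFrame(rng.random((rows, cols)), index=index)
        self._cursor = 0

    def reset(self):
        self._cursor = 0

    def next(self, num_rows=10):
        chunk = self._data.iloc[self._cursor:self._cursor + num_rows]
        self._cursor += num_rows
        return chunk

dummy_data_no_jitter = DummyStream(rate=rate)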
Example #4
def test_localdetect_on_gaussians():
    """ Test on a sum of two gaussians if the peak center is well estimated """
    rate = 128
    num_points = 10 * rate
    tm.K = 1  # tm will generate 1 column
    original = tm.makeTimeDataFrame(num_points, freq='L')
    original.index = original.index[0] + pd.to_timedelta(
        np.arange(num_points) / rate, unit='s')

    sigmas = [5.0, 1.0]
    lags = [5.0 / rate, 1.0 / rate]
    center_locs = [1 * rate, 5 * rate]
    intervals = [1.0, 5.0 - 1.0]

    gaussians_values = _gen_gaussians(center_locs, sigmas, num_points)
    peak_times = original.index[center_locs]

    original['A'] = gaussians_values

    data_gaussians = ReadData(original)
    node = LocalDetect(delta=0.5, tol=0.5)
    # loop across chunks
    looper = Looper(data_gaussians, node)
    cascade_output, _ = looper.run(chunk_size=5)
    estimation_times = [
        pd.Timestamp(event['extremum_time'])
        for event in cascade_output[cascade_output.label == 'peak'].data.values
    ]

    assert estimation_times == list(peak_times)

    data_peaks = pd.concat(
        [
            pd.DataFrame(meta, index=[pd.Timestamp(meta['extremum_time'])])
            for meta in cascade_output[cascade_output.label == 'peak'].data.values
        ],
        ignore_index=True)

    expected_peaks = pd.DataFrame(
        dict(column_name=['A', 'A'],
             value=[1.0, 1.0],
             interval=intervals,
             lag=lags))
    pd.testing.assert_frame_equal(
        data_peaks.drop(['now', 'extremum_time', 'detection_time'], axis=1),
        expected_peaks,
        check_like=True)
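`_gen_gaussians` is a local helper not included in the excerpt. Its signature is taken from the call above; a body consistent with the test's expectations (unit-height Gaussians centered at the given sample indices) could look like this:

import numpy as np

def _gen_gaussians(center_locs, sigmas, total_length):
    """Superimpose unit-height Gaussians centered at the given sample indices."""
    xdata = np.arange(total_length, dtype=float)
    out = np.zeros(total_length)
    for center, sigma in zip(center_locs, sigmas):
        out += np.exp(-((xdata - center) ** 2) / (2 * sigma ** 2))
    return out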
Example #5
def test_droprows_2(generator):
    """ Test DropRows
    test for factor = 2, 3, 4, 8 that the output is equivalent to applying a
    rolling window and taking the mean over the samples.
    size of chunk is 5 rows.
    """

    for factor in [2, 3, 4, 8]:
        generator.reset()
        node = DropRows(factor=factor, method="mean")
        looper = Looper(node=node, generator=generator)
        out_data, _ = looper.run(chunk_size=10)
        expected = (generator._data
                    .rolling(window=factor, min_periods=factor, center=False)
                    .mean()
                    .iloc[np.arange(factor - 1, len(generator._data), factor)])
        pd.testing.assert_frame_equal(
            out_data.iloc[:len(generator._data) // factor], expected)
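For intuition, the equivalence asserted above can be checked on a toy frame with plain pandas, independently of the node: decimating by `factor` with the 'mean' method matches a trailing rolling mean sampled at every `factor`-th row.

import numpy as np
import pandas as pd

toy = pd.DataFrame({'x': np.arange(8, dtype=float)})
factor = 2
decimated = toy.rolling(window=factor, min_periods=factor).mean().iloc[factor - 1::factor]
print(decimated['x'].tolist())  # [0.5, 2.5, 4.5, 6.5]: the mean of each pair of rows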
Example #6
def test_localdetect_on_ppg(ppg_generator):
    node = LocalDetect(delta=0.5, tol=0.5)
    # reset generator
    ppg_generator.reset()
    # loop across chunks
    looper = Looper(ppg_generator, node)
    cascade_output, _ = looper.run(chunk_size=5)

    expected_extremum_times = [
        '2018-11-19 11:06:39.620900', '2018-11-19 11:06:39.794709043',
        '2018-11-19 11:06:40.605209027', '2018-11-19 11:06:40.761455675',
        '2018-11-19 11:06:41.560254261', '2018-11-19 11:06:41.714533810'
    ]
    actual_extremum_times = [
        data['extremum_time'] for data in cascade_output.data.values
    ]
    expected_labels = ['peak', 'valley'] * 3
    expected_data_peak = {
        'value': np.array([1.00546074]),
        'lag': 0.064445281,
        'interval': 0.654236268,
        'column_name': 'PPG'
    }
    expected_data_valley = {
        'value': np.array([-1.01101112]),
        'lag': 0.437466566,
        'interval': 0.654236268,
        'column_name': 'PPG'
    }

    assert expected_extremum_times == actual_extremum_times
    np.testing.assert_array_equal(expected_labels, cascade_output.label.values)
    assert_dict_almost_equal(expected_data_peak,
                             cascade_output.data.values[0],
                             keys=['value', 'lag', 'interval', 'column_name'])
    assert_dict_almost_equal(expected_data_valley,
                             cascade_output.data.values[1],
                             keys=['value', 'lag', 'interval', 'column_name'])
Example #7
def test_cascade_firfilter(generator):
    """ Test FIRFilter """
    rate = generator._rate

    # create the filter
    node_fir = FIRFilter(rate=rate,
                         columns="all",
                         order=20,
                         frequencies=[3, 4],
                         filter_type="lowpass")
    expected_coeffs = np.array([
        -0.00217066, -0.00208553, -0.00108039, 0.00392436, 0.01613796,
        0.03711417, 0.06535715, 0.09608169, 0.12241194, 0.13763991, 0.13763991,
        0.12241194, 0.09608169, 0.06535715, 0.03711417, 0.01613796, 0.00392436,
        -0.00108039, -0.00208553, -0.00217066
    ])

    # Filter online (chunk by chunk)
    # --------------
    # reset the data streamer
    generator.reset()
    looper = Looper(node=node_fir, generator=generator)
    cascade_output, cascade_meta = looper.run(chunk_size=5)

    # Filter offline (whole data in a single chunk)
    # --------------
    # reset the data streamer
    generator.reset()
    looper = Looper(node=node_fir, generator=generator)
    single_chunk_output, _ = looper.run(chunk_size=None)

    # Filter offline (single update on the whole data)
    # --------------
    node_fir.i.data = generator._data.copy()
    node_fir.update()
    continuous_output = node_fir.o.data

    delay = cascade_meta[0]['delay']

    # assert filters coeffs are correct
    np.testing.assert_array_almost_equal(node_fir._coeffs, expected_coeffs)

    # assert signal filtered offline and online are the same
    warmup = delay * 2
    pd.testing.assert_frame_equal(
        continuous_output.iloc[int(warmup * node_fir._rate):],
        cascade_output.iloc[int(warmup * node_fir._rate):],
        check_less_precise=3)

    # correct for the constant group delay induced by the linear-phase FIR
    fir_o_delayed = cascade_output.copy()
    fir_o_delayed.index -= delay * np.timedelta64(1, 's')
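The `delay` read from the meta is consistent with the group delay of a linear-phase FIR, which is `(order - 1) / 2` samples for `order` taps. A quick back-of-the-envelope check; the values are illustrative, and the assumption is that the node reports exactly this quantity:

order, rate = 20, 128  # illustrative tap count and rate
group_delay_s = (order - 1) / (2 * rate)  # constant delay of a symmetric FIR, in seconds
print(group_delay_s)  # 0.07421875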
Example #8
def test_cascade_iirfilter(generator):
    """ Test IIRFilter cascade """
    rate = generator._rate
    cutoff_hz = 3
    # create filter
    node_iir = IIRFilter(rate=rate,
                         columns='all',
                         frequencies=[cutoff_hz],
                         filter_type="lowpass",
                         order=3)

    # Filter online (chunk by chunk)
    # --------------
    # reset the data streamer
    generator.reset()
    looper = Looper(node=node_iir, generator=generator)
    cascade_output, _ = looper.run(chunk_size=5)

    # Filter offline (whole data)
    # --------------
    # reset the data streamer
    generator.reset()
    looper = Looper(node=node_iir, generator=generator)
    continuous_output, _ = looper.run(chunk_size=None)

    # assert filters coeffs are correct
    expected_sos = np.array(
        [[0.00475052, 0.00950105, 0.00475052, 1., -0.6795993, 0.],
         [1., 1., 0., 1., -1.57048578, 0.68910035]])
    np.testing.assert_array_almost_equal(node_iir._sos, expected_sos)

    # assert signal filtered offline and online are the same after the warmup period.
    order = 3
    warmup = 100 * order / node_iir._rate
    np.testing.assert_array_almost_equal(
        continuous_output.iloc[int(warmup * node_iir._rate):].values,
        cascade_output.iloc[int(warmup * node_iir._rate):].values, 3)
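If the node uses a Butterworth design (an assumption, not confirmed by the excerpt), the `_sos` second-order sections could be reproduced directly with scipy; the rate below is illustrative, since the test reads it from the generator.

from scipy import signal

rate = 64          # illustrative; the test uses generator._rate
cutoff_hz = 3
sos = signal.butter(3, cutoff_hz, btype='lowpass', fs=rate, output='sos')
# sos has shape (n_sections, 6): [b0, b1, b2, a0, a1, a2] per section
print(sos)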
Example #9
def test_data_not_monotonic():
    data = dummy_data_not_monotonic
    node = Interpolate(rate=rate)
    looper = Looper(data, node)
    with pytest.raises(WorkerInterrupt):
        _, _ = looper.run(chunk_size=8)
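`dummy_data_not_monotonic` is a fixture whose index steps backwards at least once, which is what makes the node raise `WorkerInterrupt`. An illustrative construction:

import numpy as np
import pandas as pd

index = pd.to_datetime(['2018-01-01 00:00:00.000',
                        '2018-01-01 00:00:00.100',
                        '2018-01-01 00:00:00.050',  # deliberately out of order
                        '2018-01-01 00:00:00.200'])
frame = pd.DataFrame({'A': np.arange(4, dtype=float)}, index=index)
assert not frame.index.is_monotonic_increasing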