import numpy as np
import matplotlib.pyplot as plots  # assumed alias; any module exposing plot/hist/show works

# _get_test_data, compute_velocities, remove_duplicate_points, remove_stationary_points,
# apply_filter, DuplicateTimeFilter, VelocityOutliersFilter, extract_delta_time,
# extract_delta_dist and ms_to_mph are assumed to come from the project's
# GPS-processing module and are expected to be imported here.


def test_remove_duplicate_readings():
    data = _get_test_data()
    velocities = compute_velocities(data)
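    # duplicate readings make the time delta zero, so their velocity comes out NaN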
    number_of_duplicates = np.sum(np.isnan(velocities))

    fixed_data = remove_duplicate_points(data)
    fixed_velocities = compute_velocities(fixed_data)
    assert np.sum(np.isnan(fixed_velocities)) == 0
    # check that only the duplicate points were removed
    assert fixed_data.shape[0] == data.shape[0] - number_of_duplicates

    # the filter-based API should agree with the direct call
    fixed_data_processor = apply_filter(data, DuplicateTimeFilter())
    assert np.array_equal(fixed_data, fixed_data_processor)
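

# A minimal sketch (an assumption, not the project's implementation) of why
# duplicate readings show up as NaN velocities: velocity = delta_dist / delta_time,
# and a fully duplicated reading yields 0.0 / 0.0, which is NaN for numpy floats.
def _example_velocity(delta_dist, delta_time):
    # hypothetical helper for illustration only; it is not used by the tests.
    # Expects numpy arrays, e.g. _example_velocity(np.array([0.0]), np.array([0.0]))
    # returns array([nan]).
    with np.errstate(invalid="ignore", divide="ignore"):
        return delta_dist / delta_time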


def plot_velocity_histogram(data):
    """Plot the raw time deltas, then a 100-bin histogram of velocities in mph."""
    delta_time = extract_delta_time(data)
    velocities = compute_velocities(data)

    # raw time deltas between consecutive readings
    plots.plot(delta_time)
    plots.show()

    # velocity distribution, converted from m/s to mph
    plots.hist(velocities * ms_to_mph, bins=100)
    plots.show()


def test_remove_outliers():
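    # start from data already cleaned of duplicates and stationary points, so the
    # only remaining anomalies should be velocity/distance outliers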
    data = remove_stationary_points(remove_duplicate_points(_get_test_data()))

    velocity_threshold = 85.0  # mph
    fixed_data = apply_filter(data, VelocityOutliersFilter(velocity_threshold))

    # check that the raw data has outliers in both velocity and distance
    velocities = compute_velocities(data)
    outliers = np.where(velocities * ms_to_mph > velocity_threshold)[0]
    assert len(outliers) > 0
    assert np.amax(extract_delta_dist(data)) > 157900

    # no large velocities or distance jumps remain after filtering
    velocities = compute_velocities(fixed_data)
    assert np.amax(velocities) * ms_to_mph < velocity_threshold
    assert np.amax(extract_delta_dist(fixed_data)) < 330

    # exactly five outlier points are expected to be removed
    assert data.shape[0] - fixed_data.shape[0] == 5
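

# A minimal sketch (an assumption, not the project's implementation) of the
# criterion VelocityOutliersFilter is expected to apply: a point is an outlier
# when its implied velocity, converted from m/s to mph, exceeds the threshold.
def _example_outlier_mask(velocities, threshold_mph):
    # hypothetical helper for illustration only; it is not used by the tests
    return velocities * ms_to_mph > threshold_mph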