def test_remove_stationary_noise():
    """
    The data has a large amount of noise - switching between SD and LA every
    10 seconds. It starts from SD, then noise, later it returns to SD.
    We test that LA is ignored.
    """
    data = remove_duplicate_points(_get_test_data())[561:576]

    fixed_data = apply_filter(data, VelocityOutliersFilter())
    # all noisy LA points were dropped, only the stationary readings remain
    assert len(fixed_data) == 11

    # every surviving point stays within ~246 m of the stationary SD location
    stationary_point = [0, 33.004964, -117.060207]
    distances = np.array([distance(stationary_point, d) for d in fixed_data])

    assert (distances < 246.6).all()
def test_remove_duplicate_readings():
    """
    Readings with duplicate timestamps (which produce NaN velocities) are
    removed - and nothing else is. The DuplicateTimeFilter pipeline must
    agree with the direct remove_duplicate_points function.
    """
    data = _get_test_data()
    velocities = compute_velocities(data)
    # duplicate timestamps show up as NaN velocities (zero time delta)
    number_of_duplicates = np.sum(np.isnan(velocities))

    fixed_data = remove_duplicate_points(data)
    fixed_velocities = compute_velocities(fixed_data)
    assert np.sum(np.isnan(fixed_velocities)) == 0
    # check that we deleted duplicates only
    assert fixed_data.shape[0] == data.shape[0] - number_of_duplicates

    # the filter-pipeline API must produce the same result as the function
    fixed_data_processor = apply_filter(data, DuplicateTimeFilter())
    assert (fixed_data == fixed_data_processor).all()
def test_remove_stationary_noise_return_to_stable():
    """
    The data has a large amount of noise - switching between SD and LA every
    10 seconds. It starts from the noisy point, later it returns to SD.
    Here we test that even if data starts with a noisy value, we still
    converge to the stable point.
    """
    data = remove_duplicate_points(_get_test_data())[563:576]

    fixed_data = apply_filter(data, VelocityOutliersFilter(85))

    stationary_point = [0, 33.004964, -117.060207]
    distances = np.array([distance(stationary_point, d) for d in fixed_data])

    assert len(fixed_data) == 7
    # filter converged after a few steps: the first 4 points are still far
    # away (near LA), everything after that is locked onto the SD location
    assert (distances[:4] > 157000).all()
    assert (distances[4:] < 246.6).all()
def test_remove_outliers():
    """
    VelocityOutliersFilter removes exactly the readings whose implied
    velocity exceeds the threshold, leaving no large velocities or
    distance jumps behind.
    """
    data = remove_stationary_points(remove_duplicate_points(_get_test_data()))

    velocity_threshold = 85.0
    fixed_data = apply_filter(data, VelocityOutliersFilter(velocity_threshold))

    # precondition: the raw data really does contain velocity and
    # distance outliers (use velocity_threshold, not a hard-coded 85,
    # so the check stays in sync if the threshold changes)
    velocities = compute_velocities(data)
    outliers = np.where(velocities * ms_to_mph > velocity_threshold)[0]
    assert len(outliers) > 0
    assert np.amax(extract_delta_dist(data)) > 157900

    # no large velocities left after filtering
    velocities = compute_velocities(fixed_data)
    assert np.amax(velocities) * ms_to_mph < velocity_threshold
    assert np.amax(extract_delta_dist(fixed_data)) < 330

    # we expect exactly this number of points to be removed
    assert data.shape[0] - fixed_data.shape[0] == 5