Example #1
def test_strava_power_model_auto_compute(activity):
    # check that the acceleration and the elevation gradient are auto-computed
    power_auto = strava_power_model(activity, cyclist_weight=70)

    activity_ele_acc = activity.copy()
    activity_ele_acc = gradient_elevation(activity_ele_acc)
    activity_ele_acc = acceleration(activity_ele_acc)
    power_ele_acc = strava_power_model(activity_ele_acc, cyclist_weight=70)

    assert_series_equal(power_auto, power_ele_acc)
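These tests rely on an `activity` pytest fixture that is not part of this excerpt. A minimal sketch of such a fixture, assuming a time-indexed DataFrame with `elevation`, `distance`, and `speed` columns; the column names, sizes, and values below are assumptions, not taken from the source:

import numpy as np
import pandas as pd
import pytest


@pytest.fixture
def activity():
    # hypothetical fixture: a small, clean activity exposing the columns that
    # gradient_elevation, acceleration, and strava_power_model work with
    n_samples = 100
    index = pd.date_range("2020-01-01 10:00:00", periods=n_samples, freq="s")
    return pd.DataFrame(
        {'elevation': np.linspace(100.0, 150.0, n_samples),
         'distance': np.linspace(0.0, 1000.0, n_samples),
         'speed': np.full(n_samples, 10.0)},
        index=index)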
Example #2
def test_gradient_elevation(activity, append, type_output, shape):
    output = gradient_elevation(activity, append=append)
    assert isinstance(output, type_output)
    assert output.shape == shape
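The `append`, `type_output`, and `shape` arguments of this test come from a parametrize decorator that is not shown on this page. A plausible sketch, assuming the 100-sample, three-column fixture above; the concrete types and shapes are assumptions:

import pandas as pd
import pytest


@pytest.mark.parametrize(
    "append, type_output, shape",
    [(False, pd.Series, (100,)),       # return only the gradient series
     (True, pd.DataFrame, (100, 4))])  # append the gradient to the activity
def test_gradient_elevation(activity, append, type_output, shape):
    ...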
Example #3
def test_gradient_elevation_error():
    activity = pd.DataFrame({'A': np.random.random(1000)})
    msg = "elevation and distance data are required"
    with pytest.raises(MissingDataError, match=msg):
        gradient_elevation(activity)
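For reference, a minimal sketch of the kind of column validation this test exercises; `REQUIRED_COLUMNS` and `_check_required_columns` are hypothetical names, and only the error message is taken from the test above:

import pandas as pd


class MissingDataError(ValueError):
    # stand-in for the library's own exception class
    pass


REQUIRED_COLUMNS = ('elevation', 'distance')


def _check_required_columns(activity: pd.DataFrame):
    # hypothetical helper mirroring the behaviour checked by the test above
    if not all(col in activity.columns for col in REQUIRED_COLUMNS):
        raise MissingDataError('elevation and distance data are required')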
Example #4
# keep only the activities which do not contain any missing data
valid_data, valid_filenames = [], []
for filename, activity in zip(filenames, data):
    if not pd.isnull(activity).any().any():
        valid_data.append(activity)
        valid_filenames.append(filename)
data = valid_data

###############################################################################
# Data processing
# 1. Compute the acceleration and elevation gradient
# 2. Remove corrupted data (division by zero, etc.)
# 3. Compute the power for each ride using user information

for activity_idx in range(len(data)):
    # compute acceleration
    data[activity_idx] = acceleration(data[activity_idx])
    # compute gradient elevation
    data[activity_idx] = gradient_elevation(data[activity_idx])

# replace the infinite values coming from divisions by zero with NaN
for activity in data:
    activity.replace([np.inf, -np.inf], np.nan, inplace=True)

# concatenate all rides: the measured power is the target and the remaining
# columns are used as features
data_concat = pd.concat(data)
y = data_concat['power']
X = data_concat.drop('power', axis=1)
# impute the remaining missing values with the per-column mean
X.fillna(X.mean(), inplace=True)
# record which ride each sample belongs to
groups = []
for group_idx, activity in enumerate(data):
    groups += [group_idx] * activity.shape[0]
groups = np.array(groups)
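The `groups` array is the natural input for grouped cross-validation, so that samples from the same ride never end up in both the training and the testing fold. The model fitting itself is not part of this excerpt; a minimal sketch, assuming scikit-learn's GroupKFold and a gradient-boosting regressor (both choices are assumptions):

from sklearn.ensemble import GradientBoostingRegressor
from sklearn.model_selection import GroupKFold, cross_val_score

# hypothetical downstream use: keep all samples of a ride in the same fold
cv = GroupKFold(n_splits=3)
scores = cross_val_score(GradientBoostingRegressor(), X, y,
                         groups=groups, cv=cv,
                         scoring='neg_median_absolute_error')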

# cyclist weight in kg for each user
weight_user = {'user_1': 86., 'user_2': 72., 'user_3': 61., 'user_5': 72.}
weight_groups = np.zeros_like(groups, dtype=float)