def least_squares_fit(x: Vector, y: Vector) -> Tuple[float, float]:
    """
    Given two vectors x and y,
    find the least-squares values of alpha and beta
    """
    beta = correlation(x, y) * standard_deviation(y) / standard_deviation(x)
    alpha = mean(y) - beta * mean(x)
    return alpha, beta
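# A quick sanity check (an illustrative example, not from the original source):
# for perfectly linear data y = 3 * x + 5 the correlation is 1, so the fit
# should recover alpha = 5 and beta = 3 up to floating-point error. It assumes
# mean, standard_deviation, and correlation are in scope, as above.
test_x = [float(i) for i in range(-50, 60, 10)]
test_y = [3 * x_i + 5 for x_i in test_x]
test_alpha, test_beta = least_squares_fit(test_x, test_y)
assert 4.99 < test_alpha < 5.01
assert 2.99 < test_beta < 3.01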
    13.47, 26.33, 35.02, 32.09, 24.81, 19.33, 28.77, 24.26, 31.98, 25.73,
    24.86, 16.28, 34.51, 15.23, 39.72, 40.8, 26.06, 35.76, 34.76, 16.13, 44.04,
    18.03, 19.65, 32.62, 35.59, 39.43, 14.18, 35.24, 40.13, 41.82, 35.45,
    36.07, 43.67, 24.61, 20.9, 21.9, 18.79, 27.61, 27.21, 26.61, 29.77, 20.59,
    27.53, 13.82, 33.2, 25, 33.1, 36.65, 18.63, 14.87, 22.2, 36.81, 25.53,
    24.62, 26.25, 18.21, 28.08, 19.42, 29.79, 32.8, 35.99, 28.32, 27.79, 35.88,
    29.06, 36.28, 14.1, 36.63, 37.49, 26.9, 18.58, 38.48, 24.48, 18.95, 33.55,
    14.24, 29.04, 32.51, 25.63, 22.22, 19, 32.73, 15.16, 13.9, 27.2, 32.01,
    29.27, 33, 13.74, 20.42, 27.32, 18.23, 35.35, 28.48, 9.08, 24.62, 20.12,
    35.26, 19.92, 31.02, 16.49, 12.16, 30.7, 31.22, 34.65, 13.13, 27.51, 33.2,
    31.57, 14.1, 33.42, 17.44, 10.12, 24.42, 9.82, 23.39, 30.93, 15.03, 21.67,
    31.09, 33.29, 22.61, 26.89, 23.48, 8.38, 27.81, 32.35, 23.84
]
daily_hours = [dm / 60 for dm in daily_minutes]

print(stat.mean(num_friends))
print(stat.median(num_friends))

assert stat.quantile(num_friends, 0.10) == 1
assert stat.quantile(num_friends, 0.25) == 3
assert stat.quantile(num_friends, 0.75) == 9
assert stat.quantile(num_friends, 0.90) == 13

assert set(stat.mode(num_friends)) == {1, 6}

assert stat.data_range(num_friends) == 99

assert 81.54 < stat.variance(num_friends) < 81.55

assert 9.02 < stat.standard_deviation(num_friends) < 9.04
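# The `stat` module used above is assumed to be a set of book-style statistics
# helpers rather than the standard library. A minimal sketch of what the
# functions exercised by these assertions (quantile, mode, data_range) could
# look like, under that assumption:
from collections import Counter
from typing import List

def quantile(xs: List[float], p: float) -> float:
    """Return the value below which a fraction p of the data falls."""
    p_index = int(p * len(xs))
    return sorted(xs)[p_index]

def mode(xs: List[float]) -> List[float]:
    """Return a list of the most common value(s)."""
    counts = Counter(xs)
    max_count = max(counts.values())
    return [x for x, count in counts.items() if count == max_count]

def data_range(xs: List[float]) -> float:
    """Return the spread between the largest and smallest value."""
    return max(xs) - min(xs)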
# Example #7
    # 3. classify and plot the grid with k = 1, 3, 5
    #classify_and_plot_grid(1)
    #classify_and_plot_grid(3)
    #classify_and_plot_grid(5)

    # 4. for dimensions 1 through 100, track the average and minimum distance
    #    between 10,000 random pairs of points
    dimensions = range(1, 101, 5)  # use range(1, 101, 1) for a smoother graph
    avg_distances = []
    min_distances = []
    min_over_avg = []

    random.seed(0)
    for dim in dimensions:
        distances = random_distances(dim, 10000)  # 10,000 random pairs
        avg_distances.append(mean(distances))  # track the average
        min_distances.append(min(distances))  # track the minimum
        min_over_avg.append(min(distances) /
                            mean(distances))  # track the minimum / mean
        print(dim, min(distances), mean(distances),
              min(distances) / mean(distances))

    # 5. in high dimensions both the average and the minimum distance grow,
    #    so "nearby" points are not much closer than typical ones
    plt.plot(dimensions, avg_distances, label="avg_distances")
    plt.plot(dimensions, min_distances, label="min_distances")
    plt.xlabel("# of dimensions")
    plt.legend()
    plt.show()

    # 6. minimum distance as a fraction of the average distance
    plt.plot(dimensions, min_over_avg)
    plt.show()
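# random_distances is called above but not defined in this excerpt. A plausible
# sketch, assuming points drawn uniformly from the unit cube and Euclidean
# distance; the helper name random_point is an assumption, while
# random_distances matches the call signature used above.
import math
import random
from typing import List

def random_point(dim: int) -> List[float]:
    """A point with `dim` coordinates, each uniform in [0, 1)."""
    return [random.random() for _ in range(dim)]

def random_distances(dim: int, num_pairs: int) -> List[float]:
    """Euclidean distances between `num_pairs` pairs of random points."""
    return [
        math.dist(random_point(dim), random_point(dim))
        for _ in range(num_pairs)
    ]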