def estimate_beta_ridge(x, y, alpha):
    """Fit a ridge regression with L2 penalty `alpha` via stochastic gradient descent.

    x is a list of feature rows, y the matching targets; returns the fitted
    coefficient vector beta.
    """
    # Start from a random coefficient vector with one entry per feature.
    starting_beta = [random.random() for _ in x[0]]
    ridge_error = partial(squared_error_ridge, alpha=alpha)
    ridge_gradient = partial(squared_error_ridge_gradient, alpha=alpha)
    return minimize_stochastic(ridge_error, ridge_gradient,
                               x, y, starting_beta, 0.001)
def estimate_beta(x, y):
    """Fit an (unregularized) least-squares regression of y on x by
    stochastic gradient descent; returns the coefficient vector beta."""
    # One random starting coefficient per feature in a row of x.
    starting_beta = [random.random() for _ in x[0]]
    return minimize_stochastic(squared_error, squared_error_gradient,
                               x, y, starting_beta, 0.001)
def squared_error(x_i, y_i, theta):
    """Squared prediction error of the simple model theta = (alpha, beta)
    on one observation (x_i, y_i).

    NOTE(review): the original `def` line was replaced by scrape residue
    ("Exemple #3" / "0"); the header is reconstructed to match
    squared_error_gradient below, which also unpacks `alpha, beta = theta`.
    """
    alpha, beta = theta
    return error(alpha, beta, x_i, y_i) ** 2


def squared_error_gradient(x_i, y_i, theta):
    """Gradient of the squared error with respect to theta = (alpha, beta)
    at a single observation (x_i, y_i)."""
    alpha, beta = theta
    # Partial derivative with respect to the intercept alpha.
    d_alpha = -2 * error(alpha, beta, x_i, y_i)
    # Partial derivative with respect to the slope beta.
    d_beta = -2 * error(alpha, beta, x_i, y_i) * x_i
    return [d_alpha, d_beta]


# Choose random starting values for (alpha, beta); seeding makes the run
# reproducible.
random.seed(0)
theta = [random.random(), random.random()]
# NOTE(review): num_friends_good and daily_minutes_good are only defined
# below, inside the `if __name__ == "__main__"` guard, so this module-level
# call raises NameError at import time — the data definitions should precede
# this call (or this call should move under the guard). Flagged, not moved,
# to keep this edit self-contained.
alpha, beta = minimize_stochastic(squared_error, squared_error_gradient,
                                  num_friends_good, daily_minutes_good, theta,
                                  0.0001)
# Fixed: Python 2 `print alpha, beta` statement -> Python 3 function call.
print(alpha, beta)

if __name__ == "__main__":

    num_friends_good = [
        49, 41, 40, 25, 21, 21, 19, 19, 18, 18, 16, 15, 15, 15, 15, 14, 14, 13,
        13, 13, 13, 12, 12, 11, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
        10, 10, 10, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 8, 8,
        8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
        7, 7, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
        5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4,
        4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
        3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
        2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1