import random
from functools import partial   # used by the ridge variants below


def estimate_beta(x, y):
    """start from a random beta (one entry per feature of x[0]) and fit it
    by stochastic gradient descent on the squared error"""
    beta_initial = [random.random() for x_i in x[0]]
    return minimize_stochastic(squared_error,
                               squared_error_gradient,
                               x, y,
                               beta_initial,
                               0.001)
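Every example on this page leans on the same unshown helpers: squared_error, squared_error_gradient and minimize_stochastic. A minimal sketch of what they might look like for the multiple-regression case, where each x_i is a feature vector whose first entry is a constant 1 for the intercept (the implementation below is an assumption in the style of these snippets, not the original code):

import random

def predict(x_i, beta):
    """dot product of one feature vector with the coefficient vector"""
    return sum(x_ij * beta_j for x_ij, beta_j in zip(x_i, beta))

def squared_error(x_i, y_i, beta):
    return (y_i - predict(x_i, beta)) ** 2

def squared_error_gradient(x_i, y_i, beta):
    """partial derivatives of the squared error with respect to each beta_j"""
    return [-2 * x_ij * (y_i - predict(x_i, beta)) for x_ij in x_i]

def minimize_stochastic(target_fn, gradient_fn, x, y, theta_0, alpha_0=0.01):
    """stochastic gradient descent: update theta one data point at a time,
    shrinking the step size whenever a full pass fails to improve the loss,
    and stopping after 100 passes without improvement"""
    data = list(zip(x, y))
    theta = theta_0                      # current guess
    alpha = alpha_0                      # current step size
    min_theta, min_value = None, float("inf")
    passes_with_no_improvement = 0
    while passes_with_no_improvement < 100:
        value = sum(target_fn(x_i, y_i, theta) for x_i, y_i in data)
        if value < min_value:
            min_theta, min_value = theta, value
            passes_with_no_improvement = 0
            alpha = alpha_0
        else:
            passes_with_no_improvement += 1
            alpha *= 0.9
        random.shuffle(data)
        for x_i, y_i in data:
            gradient_i = gradient_fn(x_i, y_i, theta)
            theta = [t_j - alpha * g_j for t_j, g_j in zip(theta, gradient_i)]
    return min_theta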
def estimate_beta_ridge(x, y, alpha):
    beta_initial = [random.random() for x_i in x[0]]
    return minimize_stochastic(partial(squared_error_ridge, alpha=alpha),
                               partial(squared_error_ridge_gradient, alpha=alpha),
                               x, y,
                               beta_initial,
                               0.001)
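The ridge examples bind the penalty weight with functools.partial, so squared_error_ridge and squared_error_ridge_gradient must accept alpha as a keyword argument. A plausible sketch, reusing squared_error and squared_error_gradient from the sketch above and leaving the intercept term beta[0] unpenalized (that exclusion is my assumption):

def ridge_penalty(beta, alpha):
    """alpha times the squared length of beta, excluding the intercept"""
    return alpha * sum(beta_j ** 2 for beta_j in beta[1:])

def squared_error_ridge(x_i, y_i, beta, alpha):
    """ordinary squared error plus the ridge penalty on beta"""
    return squared_error(x_i, y_i, beta) + ridge_penalty(beta, alpha)

def ridge_penalty_gradient(beta, alpha):
    """gradient of just the penalty term (zero for the intercept)"""
    return [0] + [2 * alpha * beta_j for beta_j in beta[1:]]

def squared_error_ridge_gradient(x_i, y_i, beta, alpha):
    """gradient of the penalized squared error"""
    return [g_j + p_j for g_j, p_j in zip(squared_error_gradient(x_i, y_i, beta),
                                          ridge_penalty_gradient(beta, alpha))]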
Example #4
    def fit(self, x: np.ndarray, y: np.ndarray):
        beta_initial = [random.random() for x_i in x[0]]
        print(beta_initial)
        self.beta = minimize_stochastic(safe(self.squared_error),
                                        safe(self.squared_error_gradient),
                                        x, y,
                                        beta_initial,
                                        self.learning_rate)
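This variant wraps its loss and gradient in safe(...), another helper that is not shown. Presumably safe returns a version of the function that yields infinity instead of raising, so one bad step cannot crash the descent; a minimal sketch under that assumption:

def safe(f):
    """return a function that behaves like f, except that it returns
    float('inf') whenever f raises an exception"""
    def safe_f(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception:
            return float('inf')
    return safe_f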
Example #5
def estimate_beta_ridge(x, y, alpha):
    """use gradient descent to fit a ridge regression
    with penalty alpha"""
    beta_initial = [random.random() for x_i in x[0]]
    return minimize_stochastic(
        partial(squared_error_ridge, alpha=alpha),
        partial(squared_error_ridge_gradient, alpha=alpha), x, y, beta_initial,
        0.001)
def estimate_beta(caracteristicasPropiedades, valoresPropiedades):
    # Spanish identifiers: caracteristicasPropiedades = property features,
    # valoresPropiedades = property values, primerosBetas = initial betas,
    # errorCuadratico = squared error
    primerosBetas = [
        random.random()
        for caracteristicasPropiedad_i in caracteristicasPropiedades[0]
    ]
    return minimize_stochastic(errorCuadratico, squared_error_gradient,
                               caracteristicasPropiedades, valoresPropiedades,
                               primerosBetas, 0.01)
def estimate_beta_ridge(x, y, alpha):
    """use gradient descent to fit a ridge regression
    with penalty alpha"""
    beta_initial = [random.random() for x_i in x[0]]
    return minimize_stochastic(partial(squared_error_ridge, alpha=alpha), 
                               partial(squared_error_ridge_gradient, 
                                       alpha=alpha), 
                               x, y, 
                               beta_initial, 
                               0.001)
Example #8
def estimate_beta_ridge(x, y, alpha):
    beta_initial = [random.random() for x_i in x[0]]
    return minimize_stochastic(
        partial(squared_error_ridge, alpha=alpha),
        partial(squared_error_ridge_gradient, alpha=alpha), x, y, beta_initial,
        0.001)
    alpha, beta = least_squares_fit(num_friends_good, daily_minutes_good)
    print("alpha", alpha)
    print("beta", beta)
    print()
    print("part1: num_friends_good = 21 ; daily_minutes_good = 44.54 " + str(predict(alpha, beta, 21)))
    print("part2: num_friends_good = 19 ; daily_minutes_good = 51.4 " + str(predict(alpha, beta, 19)))
    print("part3: num_friends_good = 40 ; daily_minutes_good = 52.08 " + str(predict(alpha, beta, 40)))
    print("part4: num_friends_good = 18 ; daily_minutes_good = 31.22 " + str(predict(alpha, beta, 18)))
    print("part5: num_friends_good = 15 ; daily_minutes_good = 38.79 " + str(predict(alpha, beta, 15)))
    print()
    print("r-squared", r_squared(alpha, beta, num_friends_good, daily_minutes_good))

    print()

    print("gradient descent:")
    # choose random values to start
    random.seed(0)
    theta = [random.random(), random.random()]
    alpha, beta = minimize_stochastic(squared_error,
                                      squared_error_gradient,
                                      num_friends_good,
                                      daily_minutes_good,
                                      theta,
                                      0.0001)
    print("alpha", alpha)
    print("beta", beta)
    print("part1: num_friends_good = 21 ; daily_minutes_good = 44.54 " + str(predict(alpha, beta, 21)))
    print("part2: num_friends_good = 19 ; daily_minutes_good = 51.4 " + str(predict(alpha, beta, 19)))
    print("part3: num_friends_good = 40 ; daily_minutes_good = 52.08 " + str(predict(alpha, beta, 40)))
    print("part4: num_friends_good = 18 ; daily_minutes_good = 31.22 " + str(predict(alpha, beta, 18)))
    print("part5: num_friends_good = 15 ; daily_minutes_good = 38.79 " + str(predict(alpha, beta, 15)))


import random

import numpy as np
import matplotlib.pyplot as plt   # used for the scatter plot further down


if __name__ == "__main__":

    x = np.linspace(-10, 10, 1000)
    noise = np.random.uniform(-1, 1, x.size)
    y = 3 * x + 1 + noise

    #num_friends_good = x
    #daily_minutes_good = y

    #num_friends_good = [49,41,40,25,21,21,19,19,18,18,16,15,15,15,15,14,14,13,13,13,13,12,12,11,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,8,8,8,8,8,8,8,8,8,8,8,8,8,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
    #daily_minutes_good = [68.77,51.25,52.08,38.36,44.54,57.13,51.4,41.42,31.22,34.76,54.01,38.79,47.59,49.1,27.66,41.03,36.73,48.65,28.12,46.62,35.57,32.98,35,26.07,23.77,39.73,40.57,31.65,31.21,36.32,20.45,21.93,26.02,27.34,23.49,46.94,30.5,33.8,24.23,21.4,27.94,32.24,40.57,25.07,19.42,22.39,18.42,46.96,23.72,26.41,26.97,36.76,40.32,35.02,29.47,30.2,31,38.11,38.18,36.31,21.03,30.86,36.07,28.66,29.08,37.28,15.28,24.17,22.31,30.17,25.53,19.85,35.37,44.6,17.23,13.47,26.33,35.02,32.09,24.81,19.33,28.77,24.26,31.98,25.73,24.86,16.28,34.51,15.23,39.72,40.8,26.06,35.76,34.76,16.13,44.04,18.03,19.65,32.62,35.59,39.43,14.18,35.24,40.13,41.82,35.45,36.07,43.67,24.61,20.9,21.9,18.79,27.61,27.21,26.61,29.77,20.59,27.53,13.82,33.2,25,33.1,36.65,18.63,14.87,22.2,36.81,25.53,24.62,26.25,18.21,28.08,19.42,29.79,32.8,35.99,28.32,27.79,35.88,29.06,36.28,14.1,36.63,37.49,26.9,18.58,38.48,24.48,18.95,33.55,14.24,29.04,32.51,25.63,22.22,19,32.73,15.16,13.9,27.2,32.01,29.27,33,13.74,20.42,27.32,18.23,35.35,28.48,9.08,24.62,20.12,35.26,19.92,31.02,16.49,12.16,30.7,31.22,34.65,13.13,27.51,33.2,31.57,14.1,33.42,17.44,10.12,24.42,9.82,23.39,30.93,15.03,21.67,31.09,33.29,22.61,26.89,23.48,8.38,27.81,32.35,23.84]

    alpha, beta = least_squares_fit(x, y)
    print("alpha", alpha)
    print("beta", beta)

    print("r-squared", r_squared(alpha, beta, x, y))

    print()

    print("gradient descent:")
    # choose random value to start
    random.seed(0)
    theta = [random.random(), random.random()]
    alpha, beta = minimize_stochastic(squared_error, squared_error_gradient, x,
                                      y, theta, 0.0001)
    print("alpha", alpha)
print("beta", beta)
    # num_friends_good and daily_minutes_good are the same friend-count and
    # daily-minutes-per-day lists shown (commented out) in the previous example

    alpha, beta = least_squares_fit(num_friends_good, daily_minutes_good)
    print("alpha", alpha)
    print("beta", beta)
    xs = sorted(num_friends_good)
    ys = [alpha + (beta * x) for x in xs]
    print(xs)
    print(ys)
    plt.plot(xs, ys)
    plt.scatter(num_friends_good, daily_minutes_good)
    plt.axis([0, 50, 0, 100])
    # plt.show()
    print(r_squared(alpha, beta, num_friends_good, daily_minutes_good))

    random.seed(0)
    theta = [random.random(), random.random()]

    alpha, beta = minimize_stochastic(squared_error,
                                      squared_error_gradient,
                                      num_friends_good,
                                      daily_minutes_good,
                                      theta,
                                      0.0001)
    print(alpha, beta)
Example #12
def est_beta(x, y):
    beta_initial = [random.random() for x_i in x[0]]
    return gd.minimize_stochastic(sqrd_err, sqrd_err_grad, x, y, beta_initial,
                                  0.001)
Example #13
def est_beta_ridge(x, y, alpha):
    beta_init = [random.random() for x_i in x[0]]
    return gd.minimize_stochastic(partial(sqrd_err_ridge, alpha=alpha),
                                  partial(sqrd_err_rid_grad, alpha=alpha), x,
                                  y, beta_init, 0.001)
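A typical way to exercise Example #13 is to sweep a few penalty values and watch the non-intercept coefficients shrink as alpha grows. A small hypothetical driver (the synthetic data and the alpha grid below are mine, chosen purely for illustration):

import random

random.seed(0)
# each row starts with a constant 1 so the first coefficient is the intercept
xs = [[1.0, random.random(), random.random()] for _ in range(100)]
ys = [2.0 + 3.0 * x1 - 1.5 * x2 + random.uniform(-0.1, 0.1) for _, x1, x2 in xs]

for alpha in [0.0, 0.01, 0.1, 1.0]:
    # larger alpha -> stronger shrinkage of beta[1] and beta[2] toward zero
    print(alpha, est_beta_ridge(xs, ys, alpha))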