Example No. 1
0
def test_cost_5():
    """Empty design matrix, parameters, and targets must yield an infinite cost."""
    x = np.matrix([])
    theta = np.matrix([])
    y = np.matrix([])

    cost = linear_regression_cost(x, y, theta)

    assert cost == float('inf')
Example No. 2
0
def test_cost_3():
    """A row-count mismatch between x (3 samples) and y (2 targets) must yield an infinite cost."""
    x = np.matrix([[1, 1], [1, 2], [1, 3]])
    theta = np.matrix([[0], [1]])
    y = np.matrix([[1], [2]])

    cost = linear_regression_cost(x, y, theta)

    assert cost == float('inf')
Example No. 3
0
def test_cost_1():
    """With theta = 0 the cost is sum(y_i^2) / (2m) = (1 + 4 + 9) / 6."""
    x = np.matrix([[1, 1], [1, 2], [1, 3]])
    theta = np.matrix([[0], [0]])
    y = np.matrix([[1], [2], [3]])

    cost = linear_regression_cost(x, y, theta)

    assert cost == 2.3333333333333335
Example No. 4
0
def gradient_descent_base(x, y, theta, alpha, num_iters, want_history=True):
    """Run batch gradient descent for linear regression.

    Args:
        x: design matrix (np.matrix, shape m x n), one sample per row.
        y: target column vector (np.matrix, shape m x 1).
        theta: initial parameter column vector (np.matrix, shape n x 1).
        alpha: learning rate.
        num_iters: number of full-batch update steps to perform.
        want_history: when True, record the cost after every step
            via pyml.linear_regression_cost.

    Returns:
        Tuple (theta, cost_history) where cost_history is an ndarray of
        length num_iters, or None when want_history is False.
    """
    m = len(y)
    cost_history = None
    if want_history:
        cost_history = np.zeros(num_iters)

    for idx in range(num_iters):
        # Gradient of the squared-error cost (times m): X^T (X theta - y).
        # Fixed: removed un-Pythonic trailing semicolons.
        temp = x.T * (x * theta - y)
        theta = theta - alpha / m * temp

        if want_history:
            cost_history[idx] = pyml.linear_regression_cost(x, y, theta)
    return (theta, cost_history)
Example No. 5
0
def gradient_descent_base(x, y, theta, alpha, num_iters, want_history=True):
    """Run batch gradient descent for linear regression.

    Args:
        x: design matrix (np.matrix), one sample per row.
        y: target column vector (np.matrix).
        theta: initial parameter column vector (np.matrix).
        alpha: learning rate.
        num_iters: number of full-batch update steps.
        want_history: when True, record the cost after every step.

    Returns:
        Tuple (theta, cost_history); cost_history is None when
        want_history is False.
    """
    sample_count = len(y)
    history = np.zeros(num_iters) if want_history else None

    for step in range(num_iters):
        # Full-batch gradient (times m): X^T (X theta - y).
        gradient = x.T * (x * theta - y)
        theta = theta - alpha / sample_count * gradient

        if history is not None:
            history[step] = pyml.linear_regression_cost(x, y, theta)
    return (theta, history)