# Ejemplo n.º 1 (Example no. 1)
# 0
 def test_cost(self):
     """compute_cost reproduces the reference cost for the initial theta
     and for the stored best theta."""
     X, theta_init, y = self.val[0], self.val[1], self.val[2]
     self.assertAlmostEqual(compute_cost(X, theta_init, y),
                            65591548106.457443, 6)
     self.assertAlmostEqual(compute_cost(X, np.array(self.best_theta), y),
                            2062961418.085968, 6)
# Ejemplo n.º 2 (Example no. 2)
# 0
 def test_normal_eq_and_gradient_descent(self):
     """Cost at the stored best theta matches the cost at the
     closed-form (normal equation) solution."""
     X, y = self.val[0], self.val[2]
     cost_stored = compute_cost(X, self.super_best_theta, y)
     cost_closed_form = compute_cost(X, normal_equation(X, y), y)
     self.assertAlmostEqual(cost_stored, cost_closed_form, 5)
# Ejemplo n.º 3 (Example no. 3)
# 0
# Reshape the feature and target into (m, 1) column vectors.
# NOTE(review): `data` is defined earlier in the file — presumably an
# (m, 2) array of [population, price] rows; confirm against the loader.
X = np.array(data[:, 0], ndmin=2).T
y = np.array(data[:, 1], ndmin=2).T
m = y.shape[0]

# Visualize the raw data.
kit.plot_data(X, y, 'Price (x $10k)', 'Population (x 10k)')

# Build the design matrix: a bias column of ones followed by the feature.
one_v = np.ones((m, 1))
X1 = np.hstack((one_v, X))  # shape (m, 2)
theta = np.zeros((2, 1))    # start from the all-zeros parameter vector

# Compute and display the initial cost :: expected ans = 32.07
import linear_regression as lr

J = lr.compute_cost(X1, y, theta)
print("With theta = [0 ; 0] ... Cost computed = {:7.3f}".format(J))

# Cost at a second, non-trivial theta :: expected ans = 54.24
J = lr.compute_cost(X1, y, [[-1.0], [2.0]])
print("With theta = [-1 ; 2] ... Cost computed = {:7.3f}".format(J))

# Gradient-descent hyperparameters.
iterations = 1500
alpha = 0.01

# Run gradient descent :: expected ans = [ [-3.6303],  [1.1664] ]
theta, J_history = lr.gradient_descent(X1, y, theta, alpha, iterations)
print("Calculated theta = \n", theta)

# predict values for population sizes of 35,000 and 70,000
def main():
    """Fit a single-variable linear regression by gradient descent, then
    visualize the data, the fitted line, the cost contours, and the 3D
    cost surface.

    Relies on module-level imports of `multivariasi`, `linear_regression`,
    `np` (numpy), `plt` (matplotlib.pyplot), and `cm` (matplotlib colormaps).
    """
    # Load sample data: column 0 is the feature, column 1 the target.
    data = multivariasi.load_data_single()
    X_, y = data[:, 0], data[:, 1]
    X = np.ones([y.size, 2])  # design matrix with a leading bias column
    X[:, 1] = X_

    # Fit theta by gradient descent.
    m, dim = X.shape
    theta = np.zeros([dim, 1])
    alpha, max_iter = 0.01, 300
    theta = linear_regression.gradient_descent(theta, X, y, alpha, max_iter)
    # FIX: was Python-2 `print theta` (a syntax error under Python 3,
    # which the rest of this file targets).
    print(theta)

    # Plot sample data and the predicted line.
    plt.subplot(2, 1, 1)
    plt.scatter(data[:, 0], data[:, 1], color='r', marker='x')
    xx = np.linspace(-10, 10)
    yy = theta[0] + theta[1] * xx
    plt.plot(xx, yy, 'k-')

    # Grid of (theta0, theta1) values over which to evaluate the cost.
    theta0_vals = np.linspace(-10, 10, 100)
    theta1_vals = np.linspace(-1, 4, 100)

    # Initialize J_vals to a matrix of 0's, then fill it with the cost
    # at every grid point.
    J_vals = np.zeros(shape=(theta0_vals.size, theta1_vals.size))
    for t1, element in enumerate(theta0_vals):
        for t2, element2 in enumerate(theta1_vals):
            thetaT = np.zeros(shape=(2, 1))
            thetaT[0][0] = element
            thetaT[1][0] = element2
            J_vals[t1, t2] = linear_regression.compute_cost(thetaT, X, y)

    # Contour plot: transpose so theta0 runs along the x-axis.
    J_vals = J_vals.T
    # Plot J_vals as contours spaced logarithmically between 0.01 and 1000.
    plt.subplot(2, 1, 2)
    plt.contour(theta0_vals, theta1_vals, J_vals, np.logspace(-2, 3, 40))
    plt.xlabel('theta_0')
    plt.ylabel('theta_1')
    plt.scatter(theta[0][0], theta[1][0])

    # 3D surface of the cost function with the fitted theta marked.
    theta0_vals, theta1_vals = np.meshgrid(theta0_vals, theta1_vals)
    fig = plt.figure()
    # FIX: fig.gca(projection='3d') was removed in Matplotlib 3.6;
    # add_subplot is the supported way to get a 3D axes.
    ax = fig.add_subplot(projection='3d')
    # FIX: plt.hold(True) was removed in Matplotlib 3.0; axes hold by
    # default, so the call is simply dropped.

    ax.plot_surface(theta0_vals, theta1_vals, J_vals,
                    cmap=cm.coolwarm, rstride=3, cstride=3,
                    antialiased=True)

    ax.view_init(elev=60, azim=50)
    ax.dist = 8  # NOTE(review): Axes3D.dist is deprecated in Matplotlib >= 3.6

    # Mark the fitted theta on the surface at its cost value.
    x_sct, y_sct = theta[0][0], theta[1][0]
    thetaT_sct = np.zeros(shape=(2, 1))
    thetaT_sct[0][0] = theta[0][0]
    thetaT_sct[1][0] = theta[1][0]
    z_sct = linear_regression.compute_cost(thetaT_sct, X, y)
    ax.scatter(x_sct, y_sct, z_sct)

    plt.show()
# Ejemplo n.º 5 (Example no. 5)
# 0
def main():
    """Fit a single-variable linear regression by gradient descent, then
    visualize the data, the fitted line, the cost contours, and the 3D
    cost surface.

    Relies on module-level imports of `multivariate_normal`,
    `linear_regression`, `np` (numpy), `plt` (matplotlib.pyplot), and
    `cm` (matplotlib colormaps).
    """
    # Load sample data: column 0 is the feature, column 1 the target.
    data = multivariate_normal.load_data_single()
    X_, y = data[:, 0], data[:, 1]
    X = np.ones([y.size, 2])  # design matrix with a leading bias column
    X[:, 1] = X_

    # Fit theta by gradient descent.
    m, dim = X.shape
    theta = np.zeros([dim, 1])
    alpha, max_iter = 0.01, 300
    theta = linear_regression.gradient_descent(theta, X, y, alpha, max_iter)
    # FIX: was Python-2 `print theta` (a syntax error under Python 3,
    # which the rest of this file targets).
    print(theta)

    # Plot sample data and the predicted line.
    plt.subplot(2, 1, 1)
    plt.scatter(data[:, 0], data[:, 1], color='r', marker='x')
    xx = np.linspace(-10, 10)
    yy = theta[0] + theta[1] * xx
    plt.plot(xx, yy, 'k-')

    # Grid of (theta0, theta1) values over which to evaluate the cost.
    theta0_vals = np.linspace(-10, 10, 100)
    theta1_vals = np.linspace(-1, 4, 100)

    # Initialize J_vals to a matrix of 0's, then fill it with the cost
    # at every grid point.
    J_vals = np.zeros(shape=(theta0_vals.size, theta1_vals.size))
    for t1, element in enumerate(theta0_vals):
        for t2, element2 in enumerate(theta1_vals):
            thetaT = np.zeros(shape=(2, 1))
            thetaT[0][0] = element
            thetaT[1][0] = element2
            J_vals[t1, t2] = linear_regression.compute_cost(thetaT, X, y)

    # Contour plot: transpose so theta0 runs along the x-axis.
    J_vals = J_vals.T
    # Plot J_vals as contours spaced logarithmically between 0.01 and 1000.
    plt.subplot(2, 1, 2)
    plt.contour(theta0_vals, theta1_vals, J_vals, np.logspace(-2, 3, 40))
    plt.xlabel('theta_0')
    plt.ylabel('theta_1')
    plt.scatter(theta[0][0], theta[1][0])

    # 3D surface of the cost function with the fitted theta marked.
    theta0_vals, theta1_vals = np.meshgrid(theta0_vals, theta1_vals)
    fig = plt.figure()
    # FIX: fig.gca(projection='3d') was removed in Matplotlib 3.6;
    # add_subplot is the supported way to get a 3D axes.
    ax = fig.add_subplot(projection='3d')
    # FIX: plt.hold(True) was removed in Matplotlib 3.0; axes hold by
    # default, so the call is simply dropped.

    ax.plot_surface(theta0_vals, theta1_vals, J_vals,
                    cmap=cm.coolwarm, rstride=3, cstride=3,
                    antialiased=True)

    ax.view_init(elev=60, azim=50)
    ax.dist = 8  # NOTE(review): Axes3D.dist is deprecated in Matplotlib >= 3.6

    # Mark the fitted theta on the surface at its cost value.
    x_sct, y_sct = theta[0][0], theta[1][0]
    thetaT_sct = np.zeros(shape=(2, 1))
    thetaT_sct[0][0] = theta[0][0]
    thetaT_sct[1][0] = theta[1][0]
    z_sct = linear_regression.compute_cost(thetaT_sct, X, y)
    ax.scatter(x_sct, y_sct, z_sct)

    plt.show()