Example no. 1
def train(alpha, n_iter, plot=False):
    size_img, rows, cols, images = utils.read_images(
        TRAINING_IMAGES_PATH
    )
    size_lbl, labels = utils.read_labels(TRAINING_LABELS_PATH)

    # Keep only the first M_TRAINING examples
    images = images[:M_TRAINING, :]
    labels = labels[:M_TRAINING, :]
    size_img = M_TRAINING

    # Prepend a column of ones for the bias term
    bias_terms = np.ones([size_img, 1], dtype=np.float64)
    images = np.concatenate((bias_terms, images), axis=1).astype(np.float64)
    thetas = np.zeros([rows*cols+1, N_LABELS], dtype=np.float64)
    costs = np.zeros([n_iter, N_LABELS])
    X = images / 255  # scale pixel values to [0, 1]
    # Train one binary classifier per label (one-vs-all)
    for i in range(N_LABELS):
        # print('Training a classifier for label {0}'.format(i))
        y = np.array([[1 if label == i else 0 for label in labels]]).T
        thetas[:, i:i+1], costs[:, i:i+1] = func.gradient_descent(
            thetas[:, i:i+1],
            y, X, alpha,
            n_iter
        )
        if plot:
            plt.plot(costs[:, i:i+1])
            plt.show()
    return thetas
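
The snippet relies on func.gradient_descent, which is not shown here. As a point of reference, here is a minimal sketch of a compatible implementation, assuming each one-vs-all classifier is logistic regression trained with a cross-entropy cost; only the signature gradient_descent(theta, y, X, alpha, n_iter) and the (theta, costs) return value are taken from the call above, the rest is an assumption:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def gradient_descent(theta, y, X, alpha, n_iter):
    # Batch gradient descent for one binary logistic-regression classifier.
    # theta: (n, 1); y: (m, 1) of 0/1 labels; X: (m, n) with bias column included.
    m = X.shape[0]
    costs = np.zeros((n_iter, 1))
    for it in range(n_iter):
        h = sigmoid(X @ theta)                         # predictions, shape (m, 1)
        theta = theta - (alpha / m) * (X.T @ (h - y))  # gradient step
        eps = 1e-12                                    # guard against log(0)
        costs[it] = -np.mean(y * np.log(h + eps) + (1 - y) * np.log(1 - h + eps))
    return theta, costs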
Example no. 2
def gradient_descent_opt(self):
    # Run gradient descent, then keep both the final theta and the full result
    tt = gradient_descent(self.alpha, self.x, self.y, self.theta,
                          self.default_cost, self.default_gradient,
                          self.niterations, self.atol)
    self.theta = tt[0]
    self.gd = tt
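
Here gradient_descent is used as a generic optimizer that takes cost and gradient callbacks and returns a tuple whose first element is the final theta. A minimal sketch consistent with that call; the early stop on atol is a guess about how the tolerance argument is used:

import numpy as np

def gradient_descent(alpha, x, y, theta, cost, gradient, niterations, atol):
    # Generic batch gradient descent driven by user-supplied callables.
    costs = []
    for _ in range(niterations):
        step = alpha * gradient(x, y, theta)
        theta = theta - step
        costs.append(cost(x, y, theta))
        if np.all(np.abs(step) < atol):  # stop once updates fall below tolerance
            break
    return theta, np.asarray(costs)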
Example no. 3
input('Program paused. Press enter to continue.\n')
plt.close()

# ########## Part3: Gradient descent ##########
print('Running Gradient Descent ...\n')

# Add a column of ones to X for the intercept term
X = np.hstack((np.ones((m, 1)), X))
theta = np.zeros((2, 1))

# Gradient descent settings
iterations = 1500
alpha = 0.01

print(compute_cost(X, y, theta))  # cost at the initial theta (all zeros)

theta = gradient_descent(X, y, theta, alpha, iterations)
print('Theta found by gradient descent:')
print(theta[0, 0], theta[1, 0], '\n')

plt.scatter(X[:, 1], y, color='red', marker='x', label='Training data')
plt.xlim([4, 24])
plt.ylim([-5, 25])
plt.xlabel('Population of City in 10,000s')
plt.ylabel('Profit in $10,000s')
plt.plot(X[:, 1], np.dot(X, theta), label='Linear regression')
plt.legend(loc='lower right', scatterpoints=1)
plt.show()

predict1 = np.dot(np.array([[1, 3.5]]), theta)[0, 0]
print('For population = 35,000, we predict a profit of {0}'.format(predict1 * 10000))
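
compute_cost and gradient_descent are imported from elsewhere in this project. For reference, a minimal sketch of implementations consistent with how they are called above (univariate linear regression with a mean-squared-error cost); this is an assumption, not the project's actual code:

import numpy as np

def compute_cost(X, y, theta):
    # J(theta) = (1 / 2m) * sum((X @ theta - y)^2)
    m = y.shape[0]
    residual = X @ theta - y.reshape(m, 1)
    return float(residual.T @ residual) / (2 * m)

def gradient_descent(X, y, theta, alpha, iterations):
    # Batch gradient descent on the cost above; returns only the final theta,
    # matching the call in this example.
    m = y.shape[0]
    y = y.reshape(m, 1)
    for _ in range(iterations):
        theta = theta - (alpha / m) * (X.T @ (X @ theta - y))
    return theta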
Example no. 4
import numpy as np
from functions import gradient_descent


def function_2(x):
    return x[0]**2 + x[1]**2


# A moderate learning rate converges towards the minimum at (0, 0)
init_x = np.array([-3.0, 4.0])
print(gradient_descent(function_2, init_x=init_x, lr=0.1, step_num=100))

# Too large a learning rate overshoots and diverges
init_x = np.array([-3.0, 4.0])
print(gradient_descent(function_2, init_x=init_x, lr=10.0, step_num=100))

# Too small a learning rate barely moves from the starting point
init_x = np.array([-3.0, 4.0])
print(gradient_descent(function_2, init_x=init_x, lr=1e-10, step_num=100))
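
functions.gradient_descent is not shown. Given the calls above, it minimizes f starting from init_x with a fixed learning rate for step_num iterations. A sketch using a central-difference numerical gradient; the actual library may compute gradients differently:

import numpy as np

def numerical_gradient(f, x, h=1e-4):
    # Central-difference approximation of the gradient of f at x.
    grad = np.zeros_like(x)
    for i in range(x.size):
        tmp = x[i]
        x[i] = tmp + h
        fxh1 = f(x)
        x[i] = tmp - h
        fxh2 = f(x)
        grad[i] = (fxh1 - fxh2) / (2 * h)
        x[i] = tmp  # restore the coordinate
    return grad

def gradient_descent(f, init_x, lr=0.01, step_num=100):
    x = init_x.copy()  # avoid mutating the caller's array
    for _ in range(step_num):
        x -= lr * numerical_gradient(f, x)
    return x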
Example no. 5
        # (Excerpt: loop body of plot_ridge, iterating over regularization strengths i)
        w = fun.ridge(X, Y, M, i)               # ridge-regression weights for lambda = i
        y_plot = np.dot(fun.phi(x_plot, M), w)  # evaluate the degree-M basis-function fit
        plt.plot(x_plot, y_plot, label='lambda = ' + str(i), linewidth=1.5)
    plt.legend(loc=1, fontsize=10)
    plt.xlabel('x')
    plt.xticks(np.linspace(0, 1, 5))
    plt.ylabel('y')
    plt.title('M = ' + str(M))


plot_ridge(1e-7)
plot_ridge(1)
plot_ridge2(10)
# For M = 10: minimize the SSE objective by gradient descent from a zero start

w, k = fun.gradient_descent(fSSE, gSSE, w3 * 0, 0.05, 1e-07, maxiter=100000)

fig = plt.figure()

fig.add_subplot(141)
plot_ridge(1e-7)

fig.add_subplot(142)
plot_ridge(1e-4)

fig.add_subplot(143)
plot_ridge(1e-1)

fig.add_subplot(144)
plot_ridge(1)
plt.figure(figsize=(3, 4))
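
fun.gradient_descent above takes an objective fSSE, its gradient gSSE, a starting point, a step size, a tolerance, and maxiter, and returns the final weights w together with the iteration count k. A minimal sketch consistent with that call; the gradient-norm stopping rule is an assumption:

import numpy as np

def gradient_descent(f, grad, w0, alpha, tol, maxiter=10000):
    # Fixed-step gradient descent; stops early when the gradient norm < tol.
    # f is available for monitoring progress; only grad drives the update here.
    w = np.asarray(w0, dtype=float)
    k = 0
    for k in range(1, maxiter + 1):
        g = grad(w)
        if np.linalg.norm(g) < tol:
            break
        w = w - alpha * g
    return w, k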