Example #1
# NOTE: this excerpt assumes bdata (the Boston housing Bunch) and the
# feature matrix X were set up earlier, e.g. via sklearn.datasets.load_boston();
# only the target vector is shown here.
y = bdata.target

# need to scale the features (use zero mean scaling)

X_norm,mu,sigma = utils.feature_normalize(X)
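
# A minimal sketch of what utils.feature_normalize is assumed to compute
# (zero-mean, unit-variance scaling); the course helper may differ in
# detail, e.g. in how it treats zero-variance columns:
#
#   mu = X.mean(axis=0)
#   sigma = X.std(axis=0)
#   X_norm = (X - mu) / sigma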

# add intercept term to X_norm

XX = np.vstack([np.ones((X.shape[0],)),X_norm.T]).T
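
# XX now has shape (m, n+1): a leading column of ones for the intercept,
# followed by the normalized features, so theta[0] plays the role of the
# bias term. An equivalent, arguably clearer construction (an alternative,
# not the original author's code):
#
#   XX = np.hstack([np.ones((X.shape[0], 1)), X_norm])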

print('Running gradient descent ...')

# set up model and train 

linear_reg3 = LinearReg_SquaredLoss()
J_history3 = linear_reg3.train(XX,y,learning_rate=0.01,num_iters=5000,verbose=False)
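
# train() is assumed to run batch gradient descent on the squared loss,
# i.e. roughly the following update per iteration, recording the cost in
# J_history3 (a sketch, not the course implementation):
#
#   grad = XX.T.dot(XX.dot(theta) - y) / y.shape[0]
#   theta = theta - learning_rate * grad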

# Plot the convergence graph and save it in fig5.pdf

plot_utils.plot_data(range(len(J_history3)),J_history3,'Number of iterations','Cost J')
plt.savefig('fig5.pdf')
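
# With a well-chosen learning rate, the plotted cost J should decrease
# monotonically and flatten out as gradient descent converges; a rising
# or oscillating curve usually means the learning rate is too large.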

# Display the computed theta

print('Theta computed by gradient descent:', linear_reg3.theta)


########################################################################
# ======= Part 3: Predict on unseen data with model ===================#
########################################################################
Example #2
# NOTE: as in Example #1, this excerpt assumes bdata and X were set up
# earlier; the target vector is needed below for training.
y = bdata.target
# need to scale the features (use zero mean scaling)

X_norm,mu,sigma = utils.feature_normalize(X)

# add intercept term to X_norm

XX = np.vstack([np.ones((X.shape[0],)),X_norm.T]).T
# quick sanity check on the scaled features
print(X_norm)

print('Running gradient descent ...')

# set up model and train 

linear_reg3 = LinearReg_SquaredLoss()

J_history3 = linear_reg3.train(XX,y,learning_rate=0.01,num_iters=5000,verbose=False)

# Plot the convergence graph and save it in fig5a.pdf

plot_utils.plot_data(range(len(J_history3)),J_history3,'Number of iterations','Cost J')
plt.savefig('fig5a.pdf')

# Display the computed theta

print('Theta computed by gradient descent:', linear_reg3.theta)


########################################################################
# ======= Part 3: Predict on unseen data with model ===================#
########################################################################
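
# A sketch of what Part 3 presumably does with the trained model: scale an
# unseen example with the training-set mu and sigma, prepend the intercept
# term, and take the dot product with the learned theta. This is an
# assumption; the original Part 3 code is not shown in the excerpt.
#
#   x_new = (x_raw - mu) / sigma
#   x_new = np.concatenate([[1.0], x_new])
#   prediction = x_new.dot(linear_reg3.theta)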