# Fill out J_vals and save plots in fig3a.pdf and fig3b.pdf

linear_reg2 = LinearReg_SquaredLoss()

for i in range(len(theta0_vals)):
    for j in range(len(theta1_vals)):
        # Evaluate the cost at this (theta_0, theta_1) grid point;
        # loss returns the cost and its gradient, and only the cost is needed here
        linear_reg2.theta = np.array([theta0_vals[i], theta1_vals[j]])
        J_vals[i, j], _ = linear_reg2.loss(XX, y)
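
# Optional vectorized cross-check (a sketch, assuming LinearReg_SquaredLoss
# implements the standard squared-error cost J(theta) = ||XX @ theta - y||^2 / (2m);
# if its loss is defined differently, skip this check)
T0, T1 = np.meshgrid(theta0_vals, theta1_vals, indexing='ij')
thetas = np.stack([T0.ravel(), T1.ravel()])        # shape (2, n_points)
residuals = XX @ thetas - y.reshape(-1, 1)         # shape (m, n_points)
J_vals_check = (residuals ** 2).sum(axis=0).reshape(T0.shape) / (2 * len(y))
assert np.allclose(J_vals, J_vals_check)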
          
# Surface and contour plots

# J_vals was filled with shape (len(theta0_vals), len(theta1_vals)), but
# np.meshgrid (default 'xy' indexing) returns grids of shape
# (len(theta1_vals), len(theta0_vals)), so transpose J_vals to match
# before calling the plot functions

J_vals = J_vals.T
tt1, tt2 = np.meshgrid(theta0_vals, theta1_vals)
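
# Sanity check (a sketch): after the transpose, J_vals should match the shape
# of the meshgrid arrays, i.e. (len(theta1_vals), len(theta0_vals))
assert J_vals.shape == tt1.shape == (len(theta1_vals), len(theta0_vals))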
plot_utils.make_surface_plot(tt1, tt2, J_vals, r'$\theta_0$', r'$\theta_1$')
plt.savefig('fig3a.pdf')
# Contour plot; linear_reg1.theta is the solution found by gradient descent
plot_utils.make_contour_plot(tt1, tt2, J_vals, np.logspace(-10, 40, 200),
                             r'$\theta_0$', r'$\theta_1$', linear_reg1.theta)

plt.savefig('fig3b.pdf')

########################################################################
# ============= Part 4: Using sklearn's linear_model ==================#
########################################################################

# Check if the model you learned using gradient descent matches the one
# that sklearn's linear regression model learns on the same data.

from sklearn import linear_model
lr = linear_model.LinearRegression()
lr.fit(XX, y)
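
# Sketch of a side-by-side comparison (assumes, as in the gradient-descent
# code above, that XX already contains a leading all-ones bias column; if so,
# fitting with fit_intercept=False makes sklearn's coef_ directly comparable
# to the learned theta)
lr_no_intercept = linear_model.LinearRegression(fit_intercept=False)
lr_no_intercept.fit(XX, y)
print('theta from gradient descent:', linear_reg1.theta)
print('theta from sklearn:         ', lr_no_intercept.coef_)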