# Example #1
# Part 3: Visualize the squared-error cost J(theta_0, theta_1) as a surface
# and a contour plot over a grid of parameter values.
# Relies on names defined earlier in this script: np, XX, y,
# LinearReg_SquaredLoss, plot_utils, plt, linear_reg1.
print('Visualizing J(theta_0, theta_1) ...')

# Compute the grid over which we will calculate J.
theta0_vals = np.arange(-20, 40, 0.1)
theta1_vals = np.arange(-4, 4, 0.1)
J_vals = np.zeros((len(theta0_vals), len(theta1_vals)))

# Fill out J_vals and save plots in fig3a.pdf and fig3b.pdf.
linear_reg2 = LinearReg_SquaredLoss()

for i in range(len(theta0_vals)):
    for j in range(len(theta1_vals)):
        # Evaluate the loss at (theta0, theta1). loss() also returns a
        # gradient, which is discarded here. (The original applied .T to
        # this 1-D array — a no-op, so it is dropped.)
        linear_reg2.theta = np.array([theta0_vals[i], theta1_vals[j]])
        J_vals[i, j], _ = linear_reg2.loss(XX, y)

# Surface and contour plots.
# J_vals must be transposed before plotting so its axes line up with the
# meshgrid produced below (meshgrid's first output varies along columns).
J_vals = J_vals.T
tt1, tt2 = np.meshgrid(theta0_vals, theta1_vals)
plot_utils.make_surface_plot(tt1, tt2, J_vals, '$Theta_0$', '$Theta_1$')
plt.savefig('fig3a.pdf')
# Contour levels are log-spaced; linear_reg1.theta (fit earlier in the
# script) marks the minimizer on the contour plot.
plot_utils.make_contour_plot(tt1, tt2, J_vals, np.logspace(-10, 40, 200),
                             '$Theta_0$', '$Theta_1$', linear_reg1.theta)

plt.savefig('fig3b.pdf')

########################################################################
# ============= Part 4: Using sklearn's linear_model ==================#