Exemplo n.º 1
0
# NOTE(review): this fragment starts mid-normalization — the preceding
# 'X_poly_val -= mu' line appears to be cut off above this chunk; verify
# against the full source.
X_poly_val /= sigma
X_poly_val = np.column_stack((np.ones(Yval.size), X_poly_val))

# Map the test set to polynomial features and normalize it with the
# TRAINING-set statistics (mu, sigma), then prepend the intercept column.
# NOTE(review): 'ploy_feature' is presumably a typo for 'poly_feature' in the
# helper's definition elsewhere in this project — the call must match it.
X_poly_test = ploy_feature(Xtest, p)
X_poly_test -= mu
X_poly_test /= sigma
X_poly_test = np.column_stack((np.ones(Ytest.size), X_poly_test))

print('Normalized Training Example 1 : \n{}'.format(X_poly[0]))

# ======================== 6. Learning curve for polynomial features
lmd = 0
# Train regularized linear regression on the polynomial features and plot
# the fitted curve over the raw data.
theta = train_linear_reg(X_poly, Y, lmd)
x_fit, y_fit = plot_fit(np.min(X), np.max(X), mu, sigma, theta, p)
plt.figure(3)
plt.scatter(X, Y, c='r', marker='x')
plt.plot(x_fit, y_fit)
plt.xlabel('Change in water level (x)')
plt.ylabel('Water flowing out of the dam (y)')  # fixed typo: 'folowing'
plt.ylim([0, 60])
plt.title('Polynomial Regression Fit (lambda = {})'.format(lmd))
# plt.show()
# Training vs. cross-validation error as the training-set size grows.
error_train, error_val = learning_curve(X_poly, Y, X_poly_val, Yval, lmd)
plt.figure(4)
# x-axis is the NUMBER of training examples used, i.e. 1..m
# (was np.arange(m) = 0..m-1 — off by one vs. the other examples).
plt.plot(np.arange(1, m + 1), error_train, np.arange(1, m + 1), error_val)
plt.title('Polynomial Regression Learning Curve (lambda = {})'.format(lmd))
plt.legend(['Train', 'Cross Validation'])
plt.xlabel('Number of Training Examples')
plt.ylabel('Error')  # added for consistency with the sibling examples
Exemplo n.º 2
0
input('Program paused. Press ENTER to continue')

# ===================== Part 7 : Learning Curve for Polynomial Regression =====================
# Experiment with polynomial regression for multiple values of lambda.
# The code below runs polynomial regression with lambda = 0; try other
# values to see how the fit and the learning curve change.
#

lmd = 0
theta = tlr.train_linear_reg(X_poly, y, lmd)

# Plot training data and the fitted polynomial curve.
plt.figure()
plt.scatter(X, y, c='r', marker="x")
plotft.plot_fit(np.min(X), np.max(X), mu, sigma, theta, p)
plt.xlabel('Change in water level (x)')
plt.ylabel('Water flowing out of the dam (y)')  # fixed typo: 'folowing'
plt.ylim([0, 60])
plt.title('Polynomial Regression Fit (lambda = {})'.format(lmd))

# Training vs. cross-validation error for 1..m training examples.
error_train, error_val = lc.learning_curve(X_poly, y, X_poly_val, yval, lmd)
plt.figure()
plt.plot(np.arange(1, 1 + m), error_train, np.arange(1, 1 + m), error_val)
plt.title('Polynomial Regression Learning Curve (lambda = {})'.format(lmd))
plt.legend(['Train', 'Cross Validation'])
plt.xlabel('Number of Training Examples')
plt.ylabel('Error')
plt.axis([0, 13, 0, 100])
Exemplo n.º 3
0
# Normalize the cross-validation polynomial features using the training
# set's mu/sigma, then prepend the column of ones (intercept term).
X_poly_val = (X_poly_val - mu) / sigma
X_poly_val = np.c_[np.ones(X_poly_val.shape[0]), X_poly_val]

print('Normalized Training Example 1:\n')
print(f'  {X_poly[0]}  \n')

input('Program paused. Press enter to continue.\n')

# =========== Part 7: Learning Curve for Polynomial Regression =============
# Fit regularized linear regression (lambda = 1) on the polynomial
# features and overlay the fitted curve on the raw data.
lmd = 1
theta = train_linear_reg(X_poly, y, lmd)

plt.plot(X, y, 'rx', linewidth=1.5)
plot_fit(min(X), max(X), mu, sigma, theta, p)
plt.xlabel('Change in water level (x)')
plt.ylabel('Water flowing out of the dam (y)')
plt.title(f'Polynomial Regression Fit (lambda = {lmd})')
plt.show()

# Learning curve: training vs. cross-validation error as the number of
# training examples grows from 1 to m.
error_train, error_val = learning_curve(X_poly, y, X_poly_val, yval, lmd)
line_train, = plt.plot(np.arange(1, m + 1), error_train)
line_val, = plt.plot(np.arange(1, m + 1), error_val)
plt.title(f'Polynomial Regression Learning Curve (lambda = {lmd})')
plt.legend((line_train, line_val), ('Train', 'Cross Validation'))
plt.xlabel('Number of training examples')
plt.ylabel('Error')
plt.axis([0, 13, 0, 100])
plt.show()