Example #1
(m, n) = X.shape
theta = np.ones(n + 1)
lmd = 1
cost, grad = linear_cost_function(np.column_stack((np.ones(m), X)), Y, theta, lmd)
print('Cost at theta = [1  1]: {:0.6f}\n(this value should be about 303.993192)'.format(cost))
print('Gradient at theta = [1  1]: {}\n(this value should be about [-15.303016  598.250744])'.format(grad))
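# linear_cost_function is called above but not shown; the following is a
# minimal sketch of the regularized linear regression cost/gradient it is
# assumed to compute (signature inferred from the call, not confirmed by the source):
def linear_cost_function(X, y, theta, lmd):
    """Regularized cost and gradient; X is (m, n+1) with a bias column."""
    m = y.size
    diff = X.dot(theta) - y
    cost = diff.dot(diff) / (2 * m) + lmd / (2 * m) * np.sum(theta[1:] ** 2)
    grad = X.T.dot(diff) / m
    grad[1:] += (lmd / m) * theta[1:]  # the bias term is not regularized
    return cost, grad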

# =========================== 3. Train linear regression
lmd = 0
theta = train_linear_reg(np.column_stack((np.ones(m), X)), Y, lmd)
plt.plot(X, np.column_stack((np.ones(m), X)).dot(theta))
# plt.show()
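# train_linear_reg is likewise assumed; a sketch that hands the cost/gradient
# pair above to scipy.optimize.minimize (the method and iteration cap here are
# illustrative choices, not taken from the source):
import scipy.optimize as opt

def train_linear_reg(X, y, lmd):
    """Fit theta on a bias-augmented X by minimizing the regularized cost."""
    theta0 = np.zeros(X.shape[1])
    res = opt.minimize(fun=lambda t: linear_cost_function(X, y, t, lmd)[0],
                       x0=theta0,
                       jac=lambda t: linear_cost_function(X, y, t, lmd)[1],
                       method='CG', options={'maxiter': 200})
    return res.x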

# =========================== 4. Learning curve for linear regression ==============
lmd = 0
error_train, error_val = learning_curve(np.column_stack((np.ones(m), X)), Y,
                                        np.column_stack((np.ones(Yval.size), Xval)),
                                        Yval, lmd)
plt.figure(2)
plt.plot(range(1, m + 1), error_train, range(1, m + 1), error_val)
plt.title('Learning Curve for Linear Regression')
plt.legend(['Train', 'Cross Validation'])
plt.xlabel('Number of Training Examples')
plt.ylabel('Error')
plt.axis([0, 13, 0, 150])
# plt.show()
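# learning_curve (used by all three examples) trains on the first i training
# examples and records the unregularized training and validation errors; a
# sketch consistent with the call above, built on the helper sketches earlier:
def learning_curve(X, y, Xval, yval, lmd):
    """Train/validation error as a function of training-set size.
    X and Xval are assumed to already include the bias column."""
    m = X.shape[0]
    error_train = np.zeros(m)
    error_val = np.zeros(m)
    for i in range(1, m + 1):
        theta = train_linear_reg(X[:i], y[:i], lmd)
        # errors are always measured with lambda = 0, even if training used lmd > 0
        error_train[i - 1], _ = linear_cost_function(X[:i], y[:i], theta, 0)
        error_val[i - 1], _ = linear_cost_function(Xval, yval, theta, 0)
    return error_train, error_val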

# =============================== 5. Map features to polynomial terms ================
p = 8
# Map and normalize the training set
X_poly = ploy_feature(X, p)
X_poly, mu, sigma = feature_nomalize(X_poly)
X_poly = np.column_stack((np.ones(Y.size), X_poly))
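# ploy_feature and feature_nomalize (spellings kept as used above) are also
# assumed helpers; minimal sketches of the polynomial mapping and z-score
# normalization they are expected to perform:
def ploy_feature(X, p):
    """Map a single feature x to the columns [x, x^2, ..., x^p]."""
    x = np.asarray(X).reshape(-1)  # assumes X holds one feature column
    return np.column_stack([x ** (k + 1) for k in range(p)])

def feature_nomalize(X):
    """Zero-mean, unit-variance scaling; returns mu and sigma for reuse."""
    mu = np.mean(X, axis=0)
    sigma = np.std(X, axis=0, ddof=1)  # sample std; the ddof choice is an assumption
    return (X - mu) / sigma, mu, sigma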
Example #2
# Plot fit over the data
plt.plot(X, np.dot(np.c_[np.ones(m), X], theta))

input('Program paused. Press ENTER to continue')

# ===================== Part 5: Learning Curve for Linear Regression =====================
# Next, you should implement the learning_curve function.
#
# Write-up note: since the model is underfitting the data, we expect to
#                see a graph with "high bias" -- Figure 3 in ex5.pdf
#

lmd = 0
error_train, error_val = lc.learning_curve(np.c_[np.ones(m), X], y,
                                           np.c_[np.ones(Xval.shape[0]),
                                                 Xval], yval, lmd)

plt.figure()
plt.plot(np.arange(1, 1 + m), error_train, np.arange(1, 1 + m), error_val)
plt.title('Learning Curve for Linear Regression')
plt.legend(['Train', 'Cross Validation'])
plt.xlabel('Number of Training Examples')
plt.ylabel('Error')
plt.axis([0, 13, 0, 150])
#plt.xticks(list(range(0,13,2)))
#plt.yticks(list(range(0,200,50)))

input('Program paused. Press ENTER to continue')

# ===================== Part 6 : Feature Mapping for Polynomial Regression =====================
Example #3
    # (The opening of this statement was cut off in the source; by analogy with
    # the other examples it presumably trains theta with the given lambda, e.g.:)
    result = train_linear_reg(np.append(np.ones((m, 1)), X, axis=1), y,
                              train_lambda)
    plt.figure()
    plt.plot(X, y, 'rx', markersize=10)
    plt.plot(X, np.dot(np.append(np.ones((m, 1)), X, axis=1), result['x']),
             '--')
    plt.xlabel('Change in water level (x)')
    plt.ylabel('Water flowing out of the dam (y)')
    plt.axis([-60, 40, -10, 40])
    plt.pause(2)
    plt.close()
    print('Program paused. Press enter to continue.\n')
    # pause_func()

    # =========== Part 5: Learning Curve for Linear Regression =============
    curve_lambda = 0
    error_train, error_val = learning_curve(X, y, Xval, yval, curve_lambda)
    plt.figure()
    plt.plot(np.arange(1, m + 1), error_train)
    plt.plot(np.arange(1, m + 1), error_val)
    plt.title('Learning curve for linear regression')
    plt.xlabel('Number of training examples')
    plt.ylabel('Error')
    plt.legend(['Train', 'Cross Validation'])

    print('# Training Examples\tTrain Error\tCross Validation Error')
    for i in range(m):
        print('\t%d\t\t%f\t%f' % (i + 1, error_train[i], error_val[i]))

    plt.pause(2)
    plt.close()
    print('Program paused. Press enter to continue.\n')