def learning_curve(X, Y, Xval, Yval, lmd):
    # Train on the first i examples (i = 1..m) and record the training and
    # cross-validation error at each subset size.
    m = X.shape[0]
    error_train = np.zeros(m)
    error_val = np.zeros(m)

    for num in range(m):
        theta = train_linear_reg(X[0:num + 1, :], Y[0:num + 1], lmd)

        # The errors themselves are evaluated without regularization
        # (lambda = 0), so they measure the fit rather than the penalty.
        error_train[num], _ = linear_cost_function(X[0:num + 1, :], Y[0:num + 1], theta, 0)
        error_val[num], _ = linear_cost_function(Xval, Yval, theta, 0)

    return error_train, error_val
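# Note: train_linear_reg and linear_cost_function are assumed by these
# snippets but not shown on this page. A minimal sketch of what they might
# look like, assuming the usual regularized linear-regression cost with the
# bias term theta[0] excluded from the penalty:
import numpy as np
from scipy import optimize

def linear_cost_function(X, Y, theta, lmd):
    # Regularized squared-error cost and its gradient.
    m = Y.size
    h = X.dot(theta)
    cost = np.sum((h - Y) ** 2) / (2 * m) + lmd / (2 * m) * np.sum(theta[1:] ** 2)
    grad = X.T.dot(h - Y) / m
    grad[1:] += lmd / m * theta[1:]
    return cost, grad

def train_linear_reg(X, Y, lmd):
    # Fit theta by minimizing the regularized cost; CG is an arbitrary choice.
    res = optimize.minimize(lambda t: linear_cost_function(X, Y, t, lmd),
                            np.zeros(X.shape[1]), jac=True, method='CG',
                            options={'maxiter': 200})
    return res.x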
def validation_curve(X, Y, Xval, Yval):
    # Sweep a range of regularization strengths, training at each lambda and
    # recording the training and cross-validation errors.
    lambda_vec = np.array([0., 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1, 3, 10])
    error_train = np.zeros(lambda_vec.size)
    error_val = np.zeros(lambda_vec.size)

    for num in range(lambda_vec.size):
        lmd = lambda_vec[num]
        theta = train_linear_reg(X, Y, lmd)
        # As above, the errors are evaluated without the penalty term.
        error_train[num], _ = linear_cost_function(X, Y, theta, 0)
        error_val[num], _ = linear_cost_function(Xval, Yval, theta, 0)

    return lambda_vec, error_train, error_val
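# Hypothetical usage: pick the lambda with the lowest cross-validation error
# (X, Y, Xval, Yval are assumed to already carry the bias column the helpers
# expect).
lambda_vec, error_train, error_val = validation_curve(X, Y, Xval, Yval)
best_lmd = lambda_vec[np.argmin(error_val)]
print('Best lambda by validation error: {}'.format(best_lmd))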
Example #3
import numpy as np
import scipy.io
import matplotlib.pyplot as plt

# ============================ 1. Load and visualize the data ==============================
# The snippet starts mid-script; the file name 'ex5data1.mat' and the first
# few loading lines are reconstructed here from the variables used below.
data = scipy.io.loadmat('ex5data1.mat')
X = data['X']
Y = data['y'].flatten()

Xval = data['Xval']
Yval = data['yval'].flatten()

Xtest = data['Xtest']
Ytest = data['ytest'].flatten()

plt.figure(1)
plt.scatter(X,Y,c='r',marker='x')
plt.xlabel('Change in water level (x)')
plt.ylabel('Water flowing out of the dam (y)')
# plt.show()

# ============================ 2. Compute cost and gradient ==============================
(m, n) = X.shape
theta = np.ones(n + 1)
lmd = 1
cost, grad = linear_cost_function(np.column_stack((np.ones(m), X)), Y, theta, lmd)
print('Cost at theta = [1  1]: {:0.6f}\n(this value should be about 303.993192)'.format(cost))
print('Gradient at theta = [1  1]: {}\n(this value should be about [-15.303016  598.250744])'.format(grad))
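# A hedged sketch, not part of the original script: a finite-difference check
# of the analytic gradient, comparing a central-difference estimate against
# grad from linear_cost_function.
eps = 1e-4
num_grad = np.zeros_like(theta)
for i in range(theta.size):
    step = np.zeros_like(theta)
    step[i] = eps
    c_plus, _ = linear_cost_function(np.column_stack((np.ones(m), X)), Y, theta + step, lmd)
    c_minus, _ = linear_cost_function(np.column_stack((np.ones(m), X)), Y, theta - step, lmd)
    num_grad[i] = (c_plus - c_minus) / (2 * eps)
print('Max |numerical - analytic| gradient difference: {}'.format(np.max(np.abs(num_grad - grad))))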

# =========================== 3. Train linear regression ==============================
lmd = 0
theta = train_linear_reg(np.column_stack((np.ones(m), X)), Y, lmd)
plt.plot(X, np.column_stack((np.ones(m), X)).dot(theta))
# plt.show()

# =========================== 4. Learning curves for linear regression ==============
lmd = 0
error_train, error_val = learning_curve(np.column_stack((np.ones(m), X)), Y,
                                        np.column_stack((np.ones(Yval.size), Xval)), Yval, lmd)
plt.figure(2)
plt.plot(range(m), error_train, range(m), error_val)
plt.xlabel('Number of training examples')
plt.ylabel('Error')
plt.legend(['Train', 'Cross Validation'])
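# Hypothetical continuation: Xtest and Ytest are loaded above but never used
# in this excerpt. With some chosen lambda (lambda = 3 here is an assumption,
# not a value from the original), the test error could be estimated like so:
theta = train_linear_reg(np.column_stack((np.ones(m), X)), Y, 3)
error_test, _ = linear_cost_function(np.column_stack((np.ones(Ytest.size), Xtest)), Ytest, theta, 0)
print('Test error for lambda = 3: {:0.6f}'.format(error_test))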
Example #4
def cost_func(t):
    # Cost-only wrapper around linear_cost_function; X, Y and lmd are
    # expected to be defined in the enclosing scope.
    return linear_cost_function(X, Y, t, lmd)[0]
Example #5
def grad_func(t):
    # Gradient-only wrapper; picks the second value returned by
    # linear_cost_function. X, Y and lmd come from the enclosing scope.
    return linear_cost_function(X, Y, t, lmd)[1]
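# Hypothetical usage: the two wrappers split the (cost, gradient) pair from
# linear_cost_function so an optimizer can consume them separately. Note that
# grad_func recomputes the cost on every call; having one function return
# both values with jac=True avoids the duplicated work.
import numpy as np
from scipy import optimize

theta0 = np.zeros(X.shape[1])
res = optimize.minimize(cost_func, theta0, jac=grad_func, method='CG',
                        options={'maxiter': 200})
theta = res.x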