def test_learning_curve_poly(self):
    highest_degree = 8
    # Expand and normalize the training features, then scale the
    # validation features with the training-set statistics.
    X_poly = map_poly_features(self.X, highest_degree)
    X_poly, mu, sigma = feature_normalization(X_poly)
    Xval_poly = map_poly_features(self.Xval, highest_degree)
    Xval_poly = (Xval_poly - mu) / sigma
    lamda = 0.0
    error_train, error_val = learning_curve(X_poly, self.y,
                                            Xval_poly, self.yval, lamda)
    plt.xlabel('Number of Training Examples')
    plt.ylabel('Error')
    plt.plot(range(1, self.m + 1), error_train, label='Train')
    plt.plot(range(1, self.m + 1), error_val, label='Validation')
    plt.legend()
    plt.show()
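
# The preprocessing helpers called above are defined in the module the tests
# import from, and their bodies are not shown here. The following is a minimal
# sketch of what they might look like, inferred only from how the tests call
# them: map_poly_features(X, p) is assumed to expand a single feature column
# into powers 1..p (without a bias column), and feature_normalization is
# assumed to z-score each column and return (X_norm, mu, sigma) so the
# validation set can be scaled with the training statistics.
import numpy as np


def map_poly_features(X, highest_degree):
    # X is assumed to be an (m, 1) column of the raw feature; the result
    # stacks X, X**2, ..., X**highest_degree column-wise.
    X = X.reshape(-1, 1)
    return np.hstack([X ** p for p in range(1, highest_degree + 1)])


def feature_normalization(X):
    # z-score each column; ddof=1 (sample std) is a guess matching the
    # original MATLAB exercise, the real code may use the population std.
    mu = X.mean(axis=0)
    sigma = X.std(axis=0, ddof=1)
    return (X - mu) / sigma, mu, sigma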
def test_validation_curve_poly(self):
    highest_degree = 8
    X_poly = map_poly_features(self.X, highest_degree)
    X_poly, mu, sigma = feature_normalization(X_poly)
    Xval_poly = map_poly_features(self.Xval, highest_degree)
    Xval_poly = (Xval_poly - mu) / sigma
    # Sweep the regularization strength and compare train/validation error.
    lamdas = np.array([0, 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1, 3, 10])
    error_train, error_val = validation_curve(X_poly, self.y,
                                              Xval_poly, self.yval, lamdas)
    plt.xlabel('lamda')
    plt.ylabel('Error')
    plt.plot(lamdas, error_train, label='Train')
    plt.plot(lamdas, error_val, label='Validation')
    plt.legend()
    plt.show()
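
# Hedged sketch of learning_curve and validation_curve with the signatures
# used by the two tests above. It assumes train_linear_reg returns a fitted
# theta, that predictions(theta, X) returns the hypothesis values, and that
# the reported error is the unregularized squared-error cost (the usual
# convention for these curves); the real implementation may differ.
import numpy as np


def _unregularized_error(theta, X, y):
    # Plain squared-error cost, no regularization term.
    residual = predictions(theta, X).ravel() - y.ravel()
    return residual.dot(residual) / (2.0 * len(residual))


def learning_curve(X, y, Xval, yval, lamda):
    m = X.shape[0]
    error_train = np.zeros(m)
    error_val = np.zeros(m)
    for i in range(1, m + 1):
        # Train on the first i examples only, then evaluate on that subset
        # and on the full validation set.
        initial_theta = np.ones(X.shape[1] + 1)  # assumes bias added inside train_linear_reg
        theta = train_linear_reg(initial_theta, X[:i], y[:i], lamda)
        error_train[i - 1] = _unregularized_error(theta, X[:i], y[:i])
        error_val[i - 1] = _unregularized_error(theta, Xval, yval)
    return error_train, error_val


def validation_curve(X, y, Xval, yval, lamdas):
    error_train = np.zeros(len(lamdas))
    error_val = np.zeros(len(lamdas))
    for i, lamda in enumerate(lamdas):
        # Train with each candidate lamda, but report unregularized errors.
        initial_theta = np.ones(X.shape[1] + 1)
        theta = train_linear_reg(initial_theta, X, y, lamda)
        error_train[i] = _unregularized_error(theta, X, y)
        error_val[i] = _unregularized_error(theta, Xval, yval)
    return error_train, error_val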
def test_linear_regression_poly(self):
    highest_degree = 8
    X_poly = map_poly_features(self.X, highest_degree)
    X_poly, mu, sigma = feature_normalization(X_poly)
    initial_theta = np.ones(highest_degree + 1)
    lamda = 0.0
    theta = train_linear_reg(initial_theta, X_poly, self.y, lamda)
    hypo = predictions(theta, X_poly)
    # Scatter the raw data and overlay the polynomial fit, sorted by X
    # so the hypothesis is drawn as a single smooth curve.
    df = pd.DataFrame(np.hstack((self.X, hypo.reshape(self.y.shape), self.y)),
                      columns=['X', 'hypo', 'y'])
    df = df.sort_values('X')  # DataFrame.sort() was removed from pandas
    plt.xlabel('Change in water level (x)')
    plt.ylabel('Water flowing out of the dam (y)')
    plt.scatter(df['X'], df['y'], marker='x', c='r', s=30, linewidth=2)
    plt.plot(df['X'], df['hypo'], linestyle='--', linewidth=3)
    plt.show()
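
# Hedged sketch of the training and prediction helpers the tests assume.
# It guesses that the bias column is prepended inside these functions (which
# would explain initial_theta having highest_degree + 1 entries while X_poly
# has only highest_degree feature columns) and that training minimizes the
# regularized squared-error cost with scipy's conjugate-gradient solver;
# the real implementation may use a different optimizer or convention.
import numpy as np
from scipy.optimize import minimize


def _add_bias(X):
    # Prepend a column of ones for the intercept term.
    return np.hstack((np.ones((X.shape[0], 1)), X))


def _cost_and_grad(theta, X, y, lamda):
    m = X.shape[0]
    Xb = _add_bias(X)
    residual = Xb.dot(theta) - y.ravel()
    reg = np.r_[0.0, theta[1:]]  # do not regularize the intercept
    cost = (residual.dot(residual) + lamda * reg.dot(reg)) / (2.0 * m)
    grad = (Xb.T.dot(residual) + lamda * reg) / m
    return cost, grad


def train_linear_reg(initial_theta, X, y, lamda):
    result = minimize(_cost_and_grad, initial_theta, args=(X, y, lamda),
                      jac=True, method='CG', options={'maxiter': 200})
    return result.x


def predictions(theta, X):
    # Hypothesis h(x) = theta^T [1, x] for every row of X; returns a 1-D array.
    return _add_bias(X).dot(theta)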