def test_linreg():
    '''
    Helper function that tests LinearRegression.
    @param: None
    @return: None
    '''
    X_train, X_test, Y_train, Y_test = import_wine(WINE_FILE_PATH)
    num_features = X_train.shape[1]

    # Append a bias column of ones to both splits so the model can learn
    # an intercept term.
    X_train_b = np.append(X_train, np.ones((len(X_train), 1)), axis=1)
    X_test_b = np.append(X_test, np.ones((len(X_test), 1)), axis=1)

    # Run both training strategies: stochastic gradient descent first,
    # then the closed-form matrix-inversion solver.
    for header, use_sgd in (
        ('---------- LINEAR REGRESSION w/ SGD ----------', True),
        ('---- LINEAR REGRESSION w/ Matrix Inversion ---', False),
    ):
        print(header)
        if use_sgd:
            model = LinearRegression(num_features, sgd=True)
        else:
            model = LinearRegression(num_features)
        model.train(X_train_b, Y_train)
        print('Average Training Loss:', model.average_loss(X_train_b, Y_train))
        print('Average Testing Loss:', model.average_loss(X_test_b, Y_test))
def test_linreg():
    '''
    Helper function that tests LinearRegression.
    @param: None
    @return: None

    NOTE(review): this is a second definition of test_linreg in the same
    file; at import time it shadows the earlier one, which also covers the
    SGD path. Consider removing one of the two definitions. The leftover
    debug scratch code (dummy matrices, debug prints, commented-out prints)
    has been removed here.
    '''
    X_train, X_test, Y_train, Y_test = import_wine(WINE_FILE_PATH)
    num_features = X_train.shape[1]

    # Padding the inputs with a bias
    X_train_b = np.append(X_train, np.ones((len(X_train), 1)), axis=1)
    X_test_b = np.append(X_test, np.ones((len(X_test), 1)), axis=1)

    #### Matrix Inversion ######
    print('---- LINEAR REGRESSION w/ Matrix Inversion ---')
    solver_model = LinearRegression(num_features)
    solver_model.train(X_train_b, Y_train)
    print('Average Training Loss:', solver_model.average_loss(X_train_b, Y_train))
    print('Average Testing Loss:', solver_model.average_loss(X_test_b, Y_test))
def test_models(dataset, epochs, test_size=0.2):
    '''
    Tests LinearRegression, OneLayerNN, TwoLayerNN on a given dataset.
    :param dataset The path to the dataset
    :return None
    '''
    # Guard clause: bail out if the dataset file is missing.
    if not os.path.exists(dataset):
        print('The file {} does not exist'.format(dataset))
        exit()

    # Load the data; the first column is the target, the rest are features.
    raw = np.loadtxt(dataset, skiprows=1)
    Y = raw[:, 0]
    X = raw[:, 1:]

    # Standardize each feature column (zero mean, unit variance).
    X = (X - np.mean(X, axis=0)) / np.std(X, axis=0)

    X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=test_size)
    print('Running models on {} dataset'.format(dataset))

    #### Linear Regression ######
    print('----- LINEAR REGRESSION -----')
    # Append a bias column of ones for the models that do not learn one.
    bias_train = np.ones((len(X_train), 1))
    bias_test = np.ones((len(X_test), 1))
    X_train_b = np.append(X_train, bias_train, axis=1)
    X_test_b = np.append(X_test, bias_test, axis=1)
    regmodel = LinearRegression()
    regmodel.train(X_train_b, Y_train)
    print('Average Training Loss:', regmodel.average_loss(X_train_b, Y_train))
    print('Average Testing Loss:', regmodel.average_loss(X_test_b, Y_test))

    #### 1-Layer NN ######
    print('----- 1-Layer NN -----')
    nnmodel = OneLayerNN()
    nnmodel.train(X_train_b, Y_train, epochs=epochs, print_loss=False)
    print('Average Training Loss:', nnmodel.average_loss(X_train_b, Y_train))
    print('Average Testing Loss:', nnmodel.average_loss(X_test_b, Y_test))

    #### 2-Layer NN ######
    print('----- 2-Layer NN -----')
    model = TwoLayerNN(5)
    # Use X without a bias, since we learn a bias in the 2 layer NN.
    model.train(X_train, Y_train, epochs=epochs, print_loss=False)
    print('Average Training Loss:', model.average_loss(X_train, Y_train))
    print('Average Testing Loss:', model.average_loss(X_test, Y_test))