import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_wine
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

from softmax import SoftmaxClassifier
# load_digits_train_data / load_digits_test_data are assumed to live in the
# local data module; print_score and export_fig are small local helpers,
# with minimal sketches given after the functions below.
from data import load_digits_train_data, load_digits_test_data


def wine_test(epochs=200, batch_size=16, lr=0.1):
    print('wine test: params - epochs: {0}, batch_size: {1}, learning rate: {2}'
          .format(epochs, batch_size, lr))
    features, target = load_wine(return_X_y=True)

    # Make a train/test split using 30% test size
    RANDOM_STATE = 42
    X_train, X_test, y_train, y_test = train_test_split(
        features, target, test_size=0.3, random_state=RANDOM_STATE)

    # Standardize every feature to zero mean and unit variance. This makes the
    # learning problem much easier (it conditions the error surface so SGD
    # works better).
    sc = StandardScaler()
    sc.fit(X_train)
    s = SoftmaxClassifier(num_classes=3)
    X_train = sc.transform(X_train)
    X_train = np.c_[np.ones(X_train.shape[0]), X_train]  # add bias column
    X_test = sc.transform(X_test)
    X_test = np.c_[np.ones(X_test.shape[0]), X_test]  # add bias column

    s.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, lr=lr)
    print('Softmax Wine Classifier')
    print_score(s, X_train, X_test, y_train, y_test)

    hist = s.history
    fig, ax = plt.subplots()
    ax.plot(np.arange(1, 1 + len(hist)), hist, 'b-x')
    ax.set_title('Cost as a function of epoch for wine data')
    ax.set_xlabel('epoch')
    ax.set_ylabel('Ein (1/n NLL)')
    export_fig(fig, 'softmax_wine_cost_per_epoch.png')
    plt.show()
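# `print_score` is called above but not defined in this file. A minimal sketch
# of what it presumably does (report accuracy on each split), assuming the
# classifier exposes a `predict(X)` method that returns class labels:
def print_score(clf, X_train, X_test, y_train, y_test):
    # fraction of correctly classified samples on each split
    train_acc = np.mean(clf.predict(X_train) == y_train)
    test_acc = np.mean(clf.predict(X_test) == y_test)
    print('train accuracy: {0:.4f}'.format(train_acc))
    print('test accuracy: {0:.4f}'.format(test_acc))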
def digits_visualize(epochs=1, batch_size=64, lr=0.01):
    sc = SoftmaxClassifier(num_classes=10)
    X_train, y_train = load_digits_train_data()
    sc.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, lr=lr)

    # Reshape the weight matrix (784 weights per class, 10 classes) into ten
    # 28x28 images and tile them side by side so each class's learned template
    # can be inspected.
    w = sc.W
    rs = w.reshape(28, 28, 10, order='F')
    rs2 = np.transpose(rs, axes=[1, 0, 2])
    fig, ax = plt.subplots()
    ax.imshow(rs2.reshape(28, -1, order='F'), cmap='bone')
    ax.set_title('digits weight vector visualized')
    export_fig(fig, 'softmax_weight_vector.png')
    plt.show()
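# `export_fig` is likewise a local helper not shown here; a plausible sketch
# that simply writes the figure to disk:
def export_fig(fig, filename):
    # a tight bounding box keeps axis labels from being clipped in the file
    fig.savefig(filename, bbox_inches='tight')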
def digits_test(epochs=10, batch_size=32, lr=0.05):
    print('digits test: params - epochs: {0}, batch_size: {1}, learning rate: {2}'
          .format(epochs, batch_size, lr))
    sc = SoftmaxClassifier(num_classes=10)
    X_train, y_train = load_digits_train_data()
    X_test, y_test = load_digits_test_data()
    sc.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, lr=lr)
    print_score(sc, X_train, X_test, y_train, y_test)

    fig, ax = plt.subplots()
    hist = sc.history
    ax.plot(np.arange(1, 1 + len(hist)), hist, 'b-x')
    ax.set_xlabel('epoch')
    ax.set_ylabel('Ein (1/n NLL)')
    ax.set_title('softmax cost on digits as function of epoch')
    export_fig(fig, 'softmax_cost_per_epoch.png')
    plt.show()
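# The 28x28 reshape in digits_visualize implies MNIST-style 784-pixel images,
# so `load_digits_train_data` / `load_digits_test_data` presumably return an
# MNIST split. A plausible sketch using OpenML's copy of MNIST (the function
# name and the usual 60k/10k split are assumptions, not the project's actual
# loader):
from sklearn.datasets import fetch_openml

def load_mnist_splits():
    X, y = fetch_openml('mnist_784', version=1, return_X_y=True, as_frame=False)
    X = X / 255.0        # scale pixel values to [0, 1]
    y = y.astype(int)    # labels arrive as strings
    return (X[:60000], y[:60000]), (X[60000:], y[60000:])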
import numpy as np

from data import make_spiral_data, plot_2d_data
from softmax import SoftmaxClassifier

if __name__ == '__main__':
    # generate spiral data
    n_classes = 3
    data, y = make_spiral_data(100, n_classes, 2)
    fig = plot_2d_data(data, y)
    fig.show()

    # train model
    model = SoftmaxClassifier(n_classes)
    model.fit(data, y)
    print("Training accuracy is {:0.2f}".format(model.training_accuracy))
    print("Training loss is {:0.3f}".format(model.training_loss))
    boundaries = model.plot_boundaries(data, y)
    boundaries.show()
    loss_vs_epoch = model.plot_training_loss()
    loss_vs_epoch.show()

    # test model against 'new' data
    new_data, new_y = make_spiral_data(50, n_classes, 2)
    predictions = model.predict(new_data)
    print("Test accuracy is {:0.2f}".format(model.accuracy(new_data, new_y)))
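# `SoftmaxClassifier` lives in the local softmax module and is not shown here.
# A minimal sketch of the core it presumably implements - minibatch SGD on the
# softmax cross-entropy - matching the fit / predict / history API used in the
# experiments above. The class name is illustrative, and the plotting and
# accuracy conveniences used by the spiral script are omitted.
class SoftmaxSketch:
    def __init__(self, num_classes):
        self.num_classes = num_classes
        self.W = None
        self.history = []

    def fit(self, X, y, epochs=100, batch_size=32, lr=0.1):
        n, d = X.shape
        rng = np.random.default_rng(0)
        self.W = np.zeros((d, self.num_classes))
        Y = np.eye(self.num_classes)[y]  # one-hot targets, shape (n, k)
        for _ in range(epochs):
            order = rng.permutation(n)
            for start in range(0, n, batch_size):
                idx = order[start:start + batch_size]
                Xb, Yb = X[idx], Y[idx]
                P = self._softmax(Xb @ self.W)     # class probabilities
                grad = Xb.T @ (P - Yb) / len(idx)  # gradient of the mean NLL
                self.W -= lr * grad
            # record the mean negative log-likelihood once per epoch
            P = self._softmax(X @ self.W)
            self.history.append(-np.mean(np.log(P[np.arange(n), y] + 1e-12)))

    def predict(self, X):
        return np.argmax(X @ self.W, axis=1)

    @staticmethod
    def _softmax(Z):
        Z = Z - Z.max(axis=1, keepdims=True)  # shift rows for numerical stability
        e = np.exp(Z)
        return e / e.sum(axis=1, keepdims=True)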