def NeuralNet(X, Y, X_test, Y_test):
    """Train a one-hidden-layer (50 node) regression network and report nMSE.

    Builds a MultilayerNeuralNetwork (21 inputs, 7 outputs), fits it with
    mini-batch SGD on (X, Y), prints nMSE for the train and test splits via
    printnMSE, and returns the fitted optimizer.
    """
    hidden = [{"type": "fully_connected", "num_nodes": 50}]
    network = MultilayerNeuralNetwork(
        D=21, F=7, layers=hidden, training="regression", std_dev=0.01)
    optimizer = MiniBatchSGD(
        net=network, epochs=100, batch_size=32, alpha=0.005, eta=0.5,
        random_state=0, verbose=0)
    optimizer.fit(X, Y)
    print("## Neural Net ##")
    printnMSE(optimizer, X, Y, "Train data")
    printnMSE(optimizer, X_test, Y_test, "Test data")
    return optimizer
def calc_error_different_layers(layer1_num, layer2_num=None, layer3_num=None):
    """Cross-validate a 1- to 3-hidden-layer network; return mean errors.

    Builds a fully-connected architecture from the given node counts (a
    layer whose count is None is omitted), wraps it in MiniBatchSGD, and
    runs 3-fold cross-validation on the module-level X_train / Y_train.

    Returns (train_error, test_error, param), where `param` is an
    underscore-joined string of the node counts used (e.g. "50_20").

    FIX: the original body referenced undefined names nodes1/nodes2/nodes3
    instead of the function's parameters; it is corrected to use the
    parameters layer1_num/layer2_num/layer3_num.
    """
    if layer2_num is None:
        node_counts = [layer1_num]
    elif layer3_num is None:
        node_counts = [layer1_num, layer2_num]
    else:
        node_counts = [layer1_num, layer2_num, layer3_num]
    layers = [{"type": "fully_connected", "num_nodes": n}
              for n in node_counts]
    param = '_'.join(str(n) for n in node_counts)

    model = MultilayerNeuralNetwork(D, F, layers, training='regression',
                                    std_dev=0.01, verbose=True)
    mbsgd = MiniBatchSGD(net=model, batch_size=80, alpha=0.005,
                         alpha_decay=0.99, epochs=50, verbose=0)
    # n_jobs=3 runs the three CV folds in parallel.
    cv_results = cross_validate(mbsgd, X_train, Y_train, cv=3, n_jobs=3)
    test_error = cv_results['test_score'].mean()
    train_error = cv_results['train_score'].mean()
    return train_error, test_error, param
# Hyper-parameter grid for the SGD optimizer: 3*3*3*3*2 = 162 candidate
# combinations, each evaluated with 3-fold cross-validation below.
parameters = {
    'alpha': [0.0005, 0.003, 0.01],
    'alpha_decay': [0.95, 0.97, 1],
    'batch_size': [30, 55, 80],
    'eta': [0.2, 0.5, 0.8],
    'eta_inc': [0, 0.00001]
}
scores, params = [], []  # NOTE(review): `scores` is never filled in this chunk
F = Y_train.shape[1]      # number of output dimensions
D = (X_train.shape[1], )  # input shape as a 1-tuple
# NOTE(review): `layers` is not defined in this chunk -- presumably built
# earlier in the file; confirm against the preceding lines.
model = MultilayerNeuralNetwork(D, F, layers, training='regression',
                                std_dev=0.01, verbose=True)
mbsgd = MiniBatchSGD(net=model, epochs=50, verbose=0)
time_start = time.time()
# n_jobs=3 parallelizes fold fitting across processes.
clf = GridSearchCV(mbsgd, parameters, cv=3, n_jobs=3)
clf.fit(X_train, Y_train)
time_end = time.time()
print('Minutes: {:10.2f}'.format((time_end - time_start) / 60))
# Collect per-candidate mean scores and persist them to CSV.
params = clf.cv_results_['params']
test_error = clf.cv_results_['mean_test_score']
train_error = clf.cv_results_['mean_train_score']
save_error_params(
    train_error, test_error, params,
    r'C:\Users\Markus Miller\Desktop\Uni\Machine Learning\ex05\inverse_dynamics\grid_search.csv'
)
[  # NOTE(review): continuation of an assignment (likely `layers = \`) whose
   # left-hand side precedes this chunk -- confirm against the previous lines.
    {
        "type": "fully_connected",
        "num_nodes": 50
    }
]
# Train a 50-node single-hidden-layer regression network with mini-batch SGD.
model = MultilayerNeuralNetwork(D, F, layers, training="regression",
                                std_dev=0.01, verbose=True)
mbsgd = MiniBatchSGD(net=model, epochs=100, batch_size=32, alpha=0.005,
                     eta=0.5, random_state=0, verbose=2)
mbsgd.fit(X, Y)

############################################################################
# Print nMSE on test set
Y_pred = model.predict(X_test)
for f in range(F):  # one normalized MSE per output dimension
    print("Dimension %d: nMSE = %.2f %%"
          % (f + 1, 100 * nMSE(Y_pred[:, f], Y_test[:, f])))

# Store learned model, you can restore it with
# model = pickle.load(open("sarcos_model.pickle", "rb"))
# and use it in your evaluation script
        "num_nodes": 20  # NOTE(review): tail of a `layers` literal whose
    }                    # opening lines precede this chunk.
]
epochs = 150

# Train neural net
# 1-D input, 1-D output regression network fitted to the noisy samples (X, Y).
mlnn = MultilayerNeuralNetwork(D=(1, ), F=1, layers=layers,
                               training="regression", std_dev=0.01, verbose=1)
mbsgd = MiniBatchSGD(net=mlnn, epochs=epochs, batch_size=16, alpha=0.1,
                     eta=0.5, random_state=0, verbose=0)
mbsgd.fit(X, Y)

# Test neural net
# Evaluate on a dense grid over [0, 1] against the true sine function.
X_test = np.linspace(0, 1, 100)[:, np.newaxis]
Y_test = np.sin(2 * np.pi * X_test)
Y_test_prediction = mlnn.predict(X_test)
plt.title("Prediction")
plt.scatter(X.ravel(), Y.ravel(), label="Training set (noisy)")
plt.plot(X_test.ravel(), Y_test.ravel(), lw=3, label="True function")
plt.plot(X_test.ravel(), Y_test_prediction.ravel(),
# Apply the scalers (presumably fitted on the training split earlier in the
# file -- confirm) to the test split as well.
Y_test, X_test = target_scaler.transform(Y_test), feature_scaler.transform(
    X_test)
layers = [{"type": "fully_connected", "num_nodes": 90}]
F = Y_train.shape[1]      # number of output dimensions
D = (X_train.shape[1], )  # input shape as a 1-tuple
model = MultilayerNeuralNetwork(D, F, layers, training='regression',
                                std_dev=0.01, verbose=True)
mbsgd = MiniBatchSGD(net=model, epochs=100, alpha=0.003, alpha_decay=1,
                     batch_size=80, eta=0.5, eta_inc=0, verbose=2)
mbsgd.fit(X_train, Y_train)

Y_pred_train = model.predict(X_train)  # Predict Y from training set
print("Train set:")
# nMSE over the full output matrix -- presumably an aggregate across all
# output dimensions; verify against the nMSE helper's definition.
MnMSE = 100 * nMSE(Y_pred_train, Y_train)
print("nMSE =", MnMSE, "%")
for f in range(F):  # Print nMSE for different dimensions
    print("Dimension %d: nMSE = %.2f %%"
          % (f + 1, 100 * nMSE(Y_pred_train[:, f], Y_train[:, f])))
print("")

Y_pred_test = model.predict(X_test)  # Predict Y from test set
# Single 50-node hidden layer, same architecture as the other experiments.
layers = \
    [
        {
            "type": "fully_connected",
            "num_nodes": 50
        }
    ]
model = MultilayerNeuralNetwork(D, F, layers, training='regression',
                                std_dev=0.01, verbose=True)
mbsgd = MiniBatchSGD(net=model, epochs=100, batch_size=32, alpha=0.005,
                     eta=0.5, verbose=2)
time_start = time.time()
# 20 training-set sizes between 10% and 90% of the data, 5-fold CV,
# n_jobs=-1 uses all available cores.
train_sizes, train_scores, valid_scores = learning_curve(
    mbsgd, X_train, Y_train, scoring='neg_mean_squared_error',
    train_sizes=np.linspace(0.1, 0.9, 20), cv=5, n_jobs=-1)
time_end = time.time()
print('Minutes: {:10.2f}'.format((time_end - time_start) / 60))
plot_learning_curve(
        "type": "fully_connected",  # NOTE(review): tail of a `layers` literal
        "num_nodes": 100            # whose opening lines precede this chunk.
    }
]
# Train on 28x28 single-channel images with 10 output classes (no
# training="regression" here, so the network's default training mode is used
# -- presumably classification; confirm against MultilayerNeuralNetwork).
mlnn = MultilayerNeuralNetwork(D=(1, 28, 28), F=10, layers=layers,
                               std_dev=0.01, verbose=1)
mbsgd = MiniBatchSGD(net=mlnn, epochs=15, batch_size=32, alpha=0.01,
                     alpha_decay=0.9999, min_alpha=0.00005, eta=0.5,
                     eta_inc=0.00001, max_eta=0.9, random_state=0, verbose=1)
mbsgd.fit(train_images, train_targets)

############################################################################
# Print accuracy and cross entropy on test set
accuracy = 100 * model_accuracy(mlnn, test_images, test_labels)
error = mlnn.error(test_images, test_targets)
print("Accuracy on test set: %.2f %%" % accuracy)
print("Error = %.3f" % error)