from neural_network.activations import *
from neural_network.data_manipulation import load_easy_data
from neural_network.plots import plot_data_2d, plot_measure_results_data
from neural_network.learn_network import learn_network
import matplotlib.pyplot as plt

# load dataset
train, test = load_easy_data()

# x and y - observations and values for them
x = train[:, 0:2]
y = train[:, 2:3]

# plot data classes
plt.figure(figsize=(12.8, 9.6))
plt.subplot(221)
plot_data_2d(x[:, 0], x[:, 1], y[:, 0], title='True classes of points on the plane', show=False)

# learn model and plot result classes
plt.subplot(222)
mse_linear = learn_network(x, y, [20], [sigmoid, linear], iterations=100, regression=False,
                           plot_title='Predicted classes for linear function', plot_show=False)
plt.subplot(223)
mse_softmax = learn_network(x, y, [20], [sigmoid, softmax], iterations=100, regression=False,
                            plot_title='Predicted classes for softmax function', plot_show=False)
plt.subplot(224)
plot_measure_results_data([mse_linear, mse_softmax], labels=['linear', 'softmax'], title_base="Accuracy",
                          ylabel="Accuracy", title_ending=" for last layer activation function", show=False)
plt.show()

from neural_network.activations import *
from neural_network.learn_network import learn_network
from neural_network.data_manipulation import load_data
from neural_network.plots import plot_data_1d_compare, plot_measure_results_data

# load dataset
train, test = load_data('multimodal-large')

# x and y - observations and values for them
x = train[:, 0:1]
y = train[:, 1:2]

# weight initialization experiment: zeros vs. normal vs. Xavier
mse_zeros, zeros = learn_network(x, y, [20], [sigmoid, linear], initialize_weights='zeros', iterations=100,
                                 momentum_type='normal', plot=False, return_result=True)
mse_normal, normal = learn_network(x, y, [20], [sigmoid, linear], initialize_weights='norm', iterations=100,
                                   momentum_type='normal', plot=False, return_result=True)
mse_Xavier, Xavier = learn_network(x, y, [20], [sigmoid, linear], initialize_weights='Xavier', iterations=100,
                                   momentum_type='normal', plot=False, return_result=True)
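
# A hedged sketch (not original code) comparing the three initializations, reusing
# plot_measure_results_data as it is used in the easy-data classification example above;
# the labels and title strings here are illustrative assumptions.
plot_measure_results_data([mse_zeros, mse_normal, mse_Xavier], labels=['zeros', 'normal', 'Xavier'],
                          title_base="MSE", ylabel="MSE",
                          title_ending=" for different weight initializations", show=True)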

# plot labels for the regularization experiments; all entries except the last one are assumed,
# matching the regularization settings tested in the loop below
labels = [
    'no regularization',
    'L1, lambda: 0.01', 'L1, lambda: 0.001', 'L1, lambda: 0.0001',
    'L2, lambda: 0.01', 'L2, lambda: 0.001',
    'L2, lambda: 0.0001'
]

# learn model and plot result classes
i = 2
activation = sigmoid
iterations = 10000
no_change_epochs_to_stop = 100
mse_no_reg, res_no_reg = learn_network(
    x, y, neurons[:i], [activation] * i + [linear],
    beta=0.01, eta=0.01, epochs=1, iterations=iterations,
    plot_title="Prediction with sigmoid activation function and " + str(i) + " hidden layers",
    plot=False, return_result=True,
    x_test=x_test, y_test=y_test, use_test_and_stop_learning=True,
    no_change_epochs_to_stop=no_change_epochs_to_stop)
mses = [mse_no_reg]
results = [res_no_reg]
for reg_type in ["L1", "L2"]:
    for reg_lambda in [0.01, 0.001, 0.0001]:
        mse_reg, res_reg = learn_network(
            x, y, neurons[:i], [activation] * i + [linear],
            # NOTE: the arguments from here on are assumed to mirror the no-regularization call
            # above, and the regularization keyword names in particular are placeholders
            regularization_type=reg_type, regularization_lambda=reg_lambda,
            beta=0.01, eta=0.01, epochs=1, iterations=iterations,
            plot=False, return_result=True,
            x_test=x_test, y_test=y_test, use_test_and_stop_learning=True,
            no_change_epochs_to_stop=no_change_epochs_to_stop)
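        # Hedged continuation sketch: the two appends below are assumptions (not original code),
        # collecting results the way the pre-loop lists mses and results suggest.
        mses.append(mse_reg)
        results.append(res_reg)

# A hedged sketch comparing the error curves, reusing plot_measure_results_data as in the
# classification example above; the title strings are illustrative assumptions.
plot_measure_results_data(mses, labels=labels, title_base="MSE", ylabel="MSE",
                          title_ending=" for different regularization settings", show=True)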

# plot data classes
plot_data_2d(x[:, 0], x[:, 1], y[:, 0], title='True classes of points on the plane', show=False)

# neurons in each hidden layer
neurons = [50, 50, 50]

# learn model and plot result classes for networks with i hidden layers
for i in range(1, 4):
    mse_linear = learn_network(
        x, y, neurons[:i], [linear] * i + [softmax],
        beta=0.01, eta=0.01, epochs=1, iterations=1000, regression=False,
        plot_title="Prediction with linear activation function and " + str(i) + " hidden layers",
        plot=False)
    mse_ReLU = learn_network(
        x, y, neurons[:i], [ReLU] * i + [softmax],
        beta=0.01, eta=0.01, epochs=1, iterations=1000, regression=False,
        plot_title="Prediction with ReLU activation function and " + str(i) + " hidden layers",
        plot=False)

y = train[:, 1:2]

# neurons in each layer
neurons = [50, 50, 50]

# labels for plots
labels = ['linear', 'ReLU', 'sigmoid', 'tanh']

# learn model and plot result classes
plt.figure(figsize=(12.8, 19.2))
for i in range(1, 4):
    mse_linear, res_linear = learn_network(
        x, y, neurons[:i], [linear] * i + [linear],
        beta=0.01, eta=0.01, epochs=1, iterations=300,
        plot_title="Prediction with linear activation function and " + str(i) + " hidden layers",
        plot=False, return_result=True)
    mse_ReLU, res_ReLU = learn_network(
        x, y, neurons[:i], [ReLU] * i + [linear],
        beta=0.01, eta=0.01, epochs=1, iterations=300,
        plot_title="Prediction with ReLU activation function and " + str(i) + " hidden layers",
        plot=False, return_result=True)
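    # Hedged sketch (not original code): the labels list suggests that sigmoid and tanh networks
    # are trained the same way, so the two calls below follow the pattern of the linear and ReLU
    # calls above; tanh is assumed to be provided by neural_network.activations.
    mse_sigmoid, res_sigmoid = learn_network(
        x, y, neurons[:i], [sigmoid] * i + [linear],
        beta=0.01, eta=0.01, epochs=1, iterations=300,
        plot_title="Prediction with sigmoid activation function and " + str(i) + " hidden layers",
        plot=False, return_result=True)
    mse_tanh, res_tanh = learn_network(
        x, y, neurons[:i], [tanh] * i + [linear],
        beta=0.01, eta=0.01, epochs=1, iterations=300,
        plot_title="Prediction with tanh activation function and " + str(i) + " hidden layers",
        plot=False, return_result=True)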

from neural_network.activations import *
from neural_network.data_manipulation import load_data
from neural_network.plots import plot_measure_results_data, plot_data_1d_compare
from neural_network.learn_network import learn_network

# load dataset
train, test = load_data('square-large')

# x and y - observations and values for them
x = train[:, 0:1]
y = train[:, 1:2]

# learn network without any momentum technique and show model
mse, base = learn_network(x, y, [20], [sigmoid, linear], momentum_type='normal', eta=0.01, epochs=1,
                          iterations=100, plot=False, return_result=True)

# learn network with momentum and show model
mse_mom, mom = learn_network(x, y, [20], [sigmoid, linear], momentum_type='momentum', lambda_momentum=0.9,
                             eta=0.01, epochs=1, iterations=100, plot=False, return_result=True)
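
# A hedged sketch (not original code) comparing the run without momentum to the run with
# momentum, again reusing plot_measure_results_data as in the classification example above;
# labels and titles are illustrative assumptions.
plot_measure_results_data([mse, mse_mom], labels=['no momentum', 'momentum'], title_base="MSE",
                          ylabel="MSE", title_ending=" with and without momentum", show=True)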