Example #1
from neural_network.activations import *
from neural_network.data_manipulation import load_data
from neural_network.plots import plot_data_1d_compare, plot_measure_results_data
from neural_network.learn_network import learn_network
from neural_network.testing_model import add_the_same_value_to_the_same_length
import matplotlib.pyplot as plt

# load dataset
train, test = load_data('multimodal-sparse')

# x and y - observations and their target values
x = train[:, 0:1]
y = train[:, 1:2]
x_test = test[:, 0:1]
y_test = test[:, 1:2]

# neurons in each layer
neurons = [50, 50, 50]
# labels for plots
labels = [
    'no regularization', 'L1, lambda: 0.01', 'L1, lambda: 0.001',
    'L1, lambda: 0.0001', 'L2, lambda: 0.01', 'L2, lambda: 0.001',
    'L2, lambda: 0.0001'
]

# learn the model and plot the results
i = 2
activation = sigmoid
iterations = 10000
no_change_epochs_to_stop = 100
mse_no_reg, res_no_reg = learn_network(
Example #2
from neural_network.activations import *
from neural_network.learn_network import learn_network
from neural_network.data_manipulation import load_data
from neural_network.plots import plot_data_1d_compare, plot_measure_results_data

train, test = load_data('multimodal-large')

x = train[:, 0:1]
y = train[:, 1:2]

# weight initialization experiment: zeros vs. normal vs. Xavier
mse_zeros, zeros = learn_network(x,
                                 y, [20], [sigmoid, linear],
                                 initialize_weights='zeros',
                                 iterations=100,
                                 momentum_type='normal',
                                 plot=False,
                                 return_result=True)
mse_normal, normal = learn_network(x,
                                   y, [20], [sigmoid, linear],
                                   initialize_weights='norm',
                                   iterations=100,
                                   momentum_type='normal',
                                   plot=False,
                                   return_result=True)
mse_Xavier, Xavier = learn_network(x,
                                   y, [20], [sigmoid, linear],
                                   initialize_weights='Xavier',
                                   iterations=100,
                                   momentum_type='normal',
                                   plot=False,
                                   return_result=True)
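
# A minimal sketch of how the three runs could be compared with the imported
# plotting helpers; the keyword arguments below are assumptions based on how
# these helpers are called elsewhere in this collection.
plot_data_1d_compare(x, y, [zeros, normal, Xavier],
                     labels=["true", "zeros", "norm", "Xavier"],
                     title="Comparison of weight initialization methods",
                     show=True)
plot_measure_results_data([mse_zeros, mse_normal, mse_Xavier],
                          labels=["zeros", "norm", "Xavier"],
                          title_ending=" for weight initialization methods",
                          show=True)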
Example #3
from neural_network.data_manipulation import load_data
import numpy as np
from genetic.neat import NEAT
import copy
from neural_network.plots import plot_data_2d, plot_measure_results_data
import matplotlib.pyplot as plt
from sklearn.preprocessing import OneHotEncoder
from neural_network.activations import softmax

# load dataset
train, test = load_data(file_name='rings3-regular',
                        folder='classification',
                        classification=True)

# x and y - observations and their target values
x = train[:, 0:2]
y = train[:, 2:3]
# one-hot encode the class labels for use in the score function
ohe = OneHotEncoder(sparse=False)
y_ohe = ohe.fit_transform(train[:, 2:3])


# create a score function for easy evaluation of NEAT results
def score_function(res, type="score"):
    # stack the three raw class outputs into a (3, n) array and apply softmax
    results = np.vstack([res[2], res[3], res[4]])
    results = softmax(results).transpose()
    if type == "score":
        # negative mean squared error against the one-hot encoded labels
        return -np.sum((results - y_ohe)**2) / res[2].shape[0] / 3
    else:
        # fraction of points whose predicted class matches the true label
        results = results.transpose()
        return np.sum(train[:, 2] == np.argmax(results, axis=0)) / res[2].shape[0]
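
# A minimal sketch of how score_function might be exercised; the structure of
# `res` (indices 2-4 holding the raw per-class outputs, one value per training
# point) is an assumption inferred from how the function indexes it.
dummy_res = [None, None,
             np.random.randn(x.shape[0]),
             np.random.randn(x.shape[0]),
             np.random.randn(x.shape[0])]
print("score:", score_function(dummy_res, type="score"))
print("accuracy:", score_function(dummy_res, type="accuracy"))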
Example #4
from neural_network.activations import *
from neural_network.data_manipulation import load_data
from neural_network.plots import plot_data_2d, plot_measure_results_data
from neural_network.learn_network import learn_network
from neural_network.testing_model import add_the_same_value_to_the_same_length
import matplotlib.pyplot as plt

# load dataset
train, test = load_data(file_name='rings5-sparse',
                        folder='classification',
                        classification=True)

# x and y - observations and their target values
x = train[:, 0:2]
y = train[:, 2:3]
x_test = test[:, 0:2]
y_test = test[:, 2:3]

# plot data classes
plt.figure(figsize=(12.8, 4.8))
plt.subplot(121)
plot_data_2d(x[:, 0],
             x[:, 1],
             y[:, 0],
             title='True classes of points from rings5 sparse dataset',
             show=False)

neurons = [50, 50, 50]

labels = [
    'no regularization', 'L1, lambda: 0.001', 'L1, lambda: 0.0001',
Example #5
from neural_network.activations import *
from neural_network.data_manipulation import load_data
from neural_network.plots import plot_data_2d, plot_measure_results_data
from neural_network.learn_network import learn_network
from neural_network.testing_model import add_the_same_value_to_the_same_length
import matplotlib.pyplot as plt

# load dataset
train, test = load_data(file_name='xor3-balance',
                        folder='classification',
                        classification=True)

# x and y - observations and their target values
x = train[:, 0:2]
y = train[:, 2:3]
x_test = test[:, 0:2]
y_test = test[:, 2:3]

# plot data classes
plt.figure(figsize=(12.8, 4.8))
plt.subplot(121)
plot_data_2d(x[:, 0],
             x[:, 1],
             y[:, 0],
             title='True classes of points from xor3 balance dataset',
             show=False)

neurons = [50, 50, 50]

labels = [
    'no regularization', 'L1, lambda: 0.001', 'L1, lambda: 0.0001',
Example #6
import matplotlib.pyplot as plt

from neural_network.Network import Network
from neural_network.activations import *

from neural_network.data_manipulation import load_data
from neural_network.testing_model import MSE

train, test = load_data('square-simple')

x = train[:, 0:1]
y = train[:, 1:2]

# base architecture: 1 input, a 5-neuron sigmoid hidden layer, 1 linear output
simple_square = Network(1, [5], 1, [sigmoid, linear])
res = simple_square.forward(x)
print(MSE(res, y))

# hand-written parameters: a list of layer weight matrices followed by a
# list of the corresponding bias vectors
simple_square.set_weights_and_bias(
    [np.array([[15], [5], [10], [-8], [-10]]),
     np.array([[1.5, -0.5, 2, -0.8, 2]])],
    [np.array([[-12], [1], [-15], [1], [-15]]),
     np.array([[0]])])
res = simple_square.forward(x)
print(MSE(res, y))
plt.plot(x, y, 'bo')
plt.plot(x, res, 'ro')
plt.title("Neural network with weights and bias written by hand")
plt.legend(["true", "predicted"])
plt.xlabel('observed values')
plt.ylabel('result values')
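
# display the hand-tuned fit (assuming an interactive matplotlib backend)
plt.show()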
Example #7
from neural_network.activations import *
from neural_network.learn_network import learn_network
from neural_network.data_manipulation import load_data
from neural_network.plots import plot_data_1d_compare, plot_measure_results_data

train, test = load_data('steps-small')

x = train[:, 0:1]
y = train[:, 1:2]

# weight initialization experiment: zeros vs. normal vs. Xavier
mse_zeros, zeros = learn_network(x,
                                 y, [20], [sigmoid, linear],
                                 initialize_weights='zeros',
                                 iterations=200,
                                 momentum_type='normal',
                                 plot=False,
                                 return_result=True)
mse_normal, normal = learn_network(x,
                                   y, [20], [sigmoid, linear],
                                   initialize_weights='norm',
                                   iterations=200,
                                   momentum_type='normal',
                                   plot=False,
                                   return_result=True)
mse_Xavier, Xavier = learn_network(x,
                                   y, [20], [sigmoid, linear],
                                   initialize_weights='Xavier',
                                   iterations=200,
                                   momentum_type='normal',
                                   plot=False,
                                   return_result=True)
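
# A minimal sketch plotting the three error curves with the imported helper;
# the keyword names follow their use in the other examples in this collection.
plot_measure_results_data([mse_zeros, mse_normal, mse_Xavier],
                          labels=["zeros", "norm", "Xavier"],
                          title_ending=" for weight initialization methods",
                          show=True)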
Example #8
from neural_network.activations import *
from neural_network.data_manipulation import load_data
from neural_network.plots import plot_data_1d_compare, plot_measure_results_data
from neural_network.learn_network import learn_network
import matplotlib.pyplot as plt

# load dataset
train, test = load_data('steps-large')

# x and y - observations and their target values
x = train[:, 0:1]
y = train[:, 1:2]

# neurons in each layer
neurons = [50, 50, 50]
# labels for plots
labels = ['linear', 'ReLU', 'sigmoid', 'tanh']

# learn models with 1 to 3 hidden layers and plot the results
plt.figure(figsize=(12.8, 19.2))
for i in range(1, 4):
    mse_linear, res_linear = learn_network(
        x, y, neurons[:i], [linear] * i + [linear],
        beta=0.01, eta=0.01, epochs=1, iterations=300,
        plot_title="Prediction with linear activation function and " + str(i) + " hidden layers",
        plot=False, return_result=True)
    mse_ReLU, res_ReLU = learn_network(
        x, y, neurons[:i], [ReLU] * i + [linear],
        beta=0.01, eta=0.01, epochs=1, iterations=300,
        plot_title="Prediction with ReLU activation function and " + str(i) + " hidden layers",
        plot=False, return_result=True)
    mse_sigmoid, res_sigmoid = learn_network(
        x, y, neurons[:i], [sigmoid] * i + [linear],
        beta=0.01, eta=0.01, epochs=1, iterations=300,
        plot_title="Prediction with sigmoid activation function and " + str(i) + " hidden layers",
        plot=False, return_result=True)
    mse_tanh, res_tanh = learn_network(
        x, y, neurons[:i], [tanh] * i + [linear],
        beta=0.01, eta=0.01, epochs=1, iterations=300,
        plot_title="Prediction with tanh activation function and " + str(i) + " hidden layers",
        plot=False, return_result=True)

    plt.subplot(420 + 2 * i - 1)
    plot_data_1d_compare(
        x, y, [res_linear, res_ReLU, res_sigmoid, res_tanh],
        labels=["true"] + labels,
        title="Comparison of activation functions for " + str(i) + " hidden layers networks",
        show=False)
    # plot errors from the 5th iteration onward to better see the differences
    plt.subplot(420 + 2 * i)
    plot_measure_results_data(
        [mse_linear, mse_ReLU, mse_sigmoid, mse_tanh],
        labels=labels,
        title_ending=" for " + str(i) + " layers networks",
        from_error=5, show=False)
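
# render all six subplots (assuming an interactive matplotlib backend)
plt.show()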
Example #9
from neural_network.activations import *
from neural_network.data_manipulation import load_data
from neural_network.plots import plot_measure_results_data, plot_data_1d_compare
from neural_network.learn_network import learn_network

# load dataset
train, test = load_data('square-large')

x = train[:, 0:1]
y = train[:, 1:2]

# learn a baseline network without any momentum technique
mse, base = learn_network(x,
                          y, [20], [sigmoid, linear],
                          momentum_type='normal',
                          eta=0.01,
                          epochs=1,
                          iterations=100,
                          plot=False,
                          return_result=True)

# learn a network with classical momentum (lambda_momentum=0.9)
mse_mom, mom = learn_network(x,
                             y, [20], [sigmoid, linear],
                             momentum_type='momentum',
                             lambda_momentum=0.9,
                             eta=0.01,
                             epochs=1,
                             iterations=100,
                             plot=False,
                             return_result=True)
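
# A minimal sketch comparing the two error curves; plot_measure_results_data
# and its keyword names are assumed from their use in the other examples here.
plot_measure_results_data([mse, mse_mom],
                          labels=["no momentum", "momentum"],
                          title_ending=" for momentum comparison",
                          show=True)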