Example #1
0
def test_single_activation_function(activation):
    """Train and evaluate a 50/10 two-layer network with *activation* in the hidden layer."""
    hidden_layer = Dense(layer_size=50,
                         activation_func=activation,
                         weight_initializer=XavierInitializer())
    output_layer = Dense(layer_size=10,
                         activation_func=Softmax(),
                         weight_initializer=XavierInitializer())

    # Plots are written to a per-activation results directory.
    model = NeuralNetwork(
        optimizer=StaticGradientDescent(
            learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=[hidden_layer, output_layer],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f'./results/activations/{activation.get_name()}')
        ])

    model.fit(x_train=X_train,
              y_train=y_train,
              x_val=X_val,
              y_val=y_val,
              epochs=default_parameters['epochs'],
              batch_size=default_parameters['batch_size'])

    model.test(X_test, y_test)
Example #2
0
def test_single_cost_and_last_layer(cost_func, last_layer):
    """Train and evaluate a network pairing *cost_func* with *last_layer*'s activation."""
    layer_stack = [
        Dense(layer_size=50,
              activation_func=ReLu(),
              weight_initializer=XavierInitializer()),
        Dense(layer_size=10,
              activation_func=last_layer,
              weight_initializer=XavierInitializer()),
    ]
    # Result directory encodes both the loss and the output activation.
    plot_path = (f'./lab_3/cost/func={cost_func.get_name()}'
                 f'&last_layer={last_layer.get_name()}')

    model = NeuralNetwork(
        optimizer=StaticGradientDescent(default_parameters['learning_rate']),
        loss=cost_func,
        layers=layer_stack,
        callbacks=[LoggerCallback(), PlotCallback(plot_path)])

    model.fit(x_train=X_train,
              y_train=y_train,
              x_val=X_val,
              y_val=y_val,
              epochs=default_parameters['epochs'],
              batch_size=default_parameters['batch_size'])

    model.test(X_test, y_test)
def test_single_optimizer(optimizer):
    """Train and evaluate a fixed ReLu/Softmax network under the given *optimizer*."""
    hidden_layer = Dense(layer_size=50,
                         activation_func=ReLu(),
                         weight_initializer=XavierInitializer())
    output_layer = Dense(layer_size=10,
                         activation_func=Softmax(),
                         weight_initializer=XavierInitializer())

    # Plots go to a per-optimizer results directory.
    model = NeuralNetwork(
        optimizer=optimizer,
        loss=CrossEntropy(),
        layers=[hidden_layer, output_layer],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f'./lab_3/optimizers/{optimizer.get_name()}')
        ])

    model.fit(x_train=X_train,
              y_train=y_train,
              x_val=X_val,
              y_val=y_val,
              epochs=default_parameters['epochs'],
              batch_size=default_parameters['batch_size'])

    model.test(X_test, y_test)
Example #4
0
 def __init__(self, num_of_filters, kernel, stride=(1, 1), kernel_initializer=None,
              bias_initializer=None,
              activation_func=None, layer_name='Convo2D'):
     """Configure a 2-D convolutional layer.

     Args:
         num_of_filters: number of convolution filters.
         kernel: kernel (filter) dimensions.
         stride: sliding-window step, defaults to (1, 1).
         kernel_initializer: weight initializer; a fresh XavierInitializer
             is created when omitted.
         bias_initializer: bias initializer; a fresh ZeroInitializer is
             created when omitted.
         activation_func: activation applied to the layer output; a fresh
             ReLu is created when omitted.
         layer_name: identifier forwarded to the base layer.
     """
     super().__init__(layer_name)
     self._num_of_filters = num_of_filters
     self._kernel = kernel
     self._stride = stride
     # None sentinels fix the mutable-default-argument bug: the original
     # signature evaluated XavierInitializer()/ZeroInitializer()/ReLu()
     # once at def time, silently sharing one instance across every layer
     # constructed with defaults.
     self._kernel_initializer = (XavierInitializer()
                                 if kernel_initializer is None
                                 else kernel_initializer)
     self._bias_initializer = (ZeroInitializer()
                               if bias_initializer is None
                               else bias_initializer)
     self._activation_func = ReLu() if activation_func is None else activation_func
     # presumably caches the pre-activation output for backprop — TODO confirm
     self._z = None
Example #5
0
 def __init__(self,
              layer_size,
              weight_initializer=None,
              activation_func=None,
              bias_initializer=None,
              layer_name='dense'):
     """Configure a fully-connected (dense) layer.

     Args:
         layer_size: number of units in the layer.
         weight_initializer: weight initializer; a fresh XavierInitializer
             is created when omitted.
         activation_func: activation applied to the layer output; a fresh
             Sigmoid is created when omitted.
         bias_initializer: bias initializer; a fresh ZeroInitializer is
             created when omitted.
         layer_name: identifier forwarded to the base layer.
     """
     super().__init__(layer_name)
     # None sentinels fix the mutable-default-argument bug: the original
     # signature evaluated XavierInitializer()/Sigmoid()/ZeroInitializer()
     # once at def time, silently sharing one instance across every layer
     # constructed with defaults.
     self._weight_initializer = (XavierInitializer()
                                 if weight_initializer is None
                                 else weight_initializer)
     self._activation_func = Sigmoid() if activation_func is None else activation_func
     self._bias_initializer = (ZeroInitializer()
                               if bias_initializer is None
                               else bias_initializer)
     self._layer_size = layer_size
     # presumably caches the pre-activation output for backprop — TODO confirm
     self._z = None
Example #6
0
from initializers.he_initializer import HeInitializer
from initializers.normal_initializer import NormalInitializer
from initializers.xavier_initializer import XavierInitializer
from tests.initializer_tests import perform_initializer_test

# Initializers under test; commented entries are alternative configurations
# that can be re-enabled for comparison runs.
initializers_under_test = [
    XavierInitializer(gain=6),
    # HeInitializer(),
    # NormalInitializer(loc=0, scale=1, a=10)
]

perform_initializer_test(initializers_under_test)
Example #7
0
from callbacks.save_best_callback import SaveBestCallback
from initializers.xavier_initializer import XavierInitializer
from layers.dense import Dense
from losses.crossentropy import CrossEntropy
from losses.mse import MeanSquaredError
from metrics.normal_accuracy import NormalAccuracy
from models.model import NeuralNetwork
from optimizers.gradient_descent_static import StaticGradientDescent
from preprocessing.data_loader import get_data

# NOTE(review): ReLu, Softmax, LoggerCallback and PlotCallback are used below
# but not imported in this file — confirm they are provided elsewhere.
hidden_layer = Dense(layer_size=50,
                     activation_func=ReLu(),
                     weight_initializer=XavierInitializer())
output_layer = Dense(layer_size=10,
                     activation_func=Softmax(),
                     weight_initializer=XavierInitializer())

# Best model is checkpointed; logging and plotting run every epoch.
model = NeuralNetwork(optimizer=StaticGradientDescent(learning_rate=0.01),
                      loss=MeanSquaredError(),
                      layers=[hidden_layer, output_layer],
                      callbacks=[
                          SaveBestCallback('./results/01_10_2019_13:00',
                                           'best_model.pkl'),
                          LoggerCallback(),
                          PlotCallback('./results/batch_size/', 'test.pkl')
                      ])

(X_train, y_train), (X_val, y_val), (X_test, y_test) = get_data()

model.fit(x_train=X_train,
          y_train=y_train,