def test_single_initializer(initializer):
    """Train and evaluate a dense network whose weights use *initializer*.

    Builds a Flatten -> Dense(50, ReLu) -> Dense(10, Softmax) model with the
    Adam optimizer, fits it on the module-level train/val split, and runs the
    held-out test set. Plots land under ./lab_3/initializers/<name>.
    """
    network = NeuralNetwork(
        optimizer=AdamOptimizer(
            learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=[
            Flatten(),
            Dense(layer_size=50, activation_func=ReLu(),
                  weight_initializer=initializer),
            Dense(layer_size=10, activation_func=Softmax(),
                  weight_initializer=initializer),
        ],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f'./lab_3/initializers/{initializer.get_name()}'),
        ])
    network.fit(x_train=X_train, y_train=y_train,
                x_val=X_val, y_val=y_val,
                epochs=default_parameters['epochs'],
                batch_size=default_parameters['batch_size'])
    network.test(X_test, y_test)
def test_single_initializer_with_convo(initializer):
    """Train and evaluate a small convolutional network using *initializer*.

    Architecture: Conv2D(8 filters, 3x3) -> MaxPool(2x2) -> Flatten ->
    Dense(50, ReLu) -> Dense(10, Softmax). Uses Adam with 10x the default
    learning rate, then evaluates on the test split. Plots go to
    ./lab_3/initializers/<name>.
    """
    # NOTE: learning rate is deliberately scaled up for the convolutional run.
    lr = default_parameters['learning_rate'] * 10
    network = NeuralNetwork(
        optimizer=AdamOptimizer(learning_rate=lr),
        loss=CrossEntropy(),
        layers=[
            Convolution2D(num_of_filters=8, kernel=(3, 3),
                          activation_func=ReLu()),
            MaxPooling2D(pool_size=(2, 2), stride=(2, 2)),
            Flatten(),
            Dense(layer_size=50, activation_func=ReLu(),
                  weight_initializer=initializer),
            Dense(layer_size=10, activation_func=Softmax(),
                  weight_initializer=initializer),
        ],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f'./lab_3/initializers/{initializer.get_name()}'),
        ])
    network.fit(x_train=X_train, y_train=y_train,
                x_val=X_val, y_val=y_val,
                epochs=default_parameters['epochs'],
                batch_size=default_parameters['batch_size'])
    network.test(X_test, y_test)
def test_single_activation_function(activation):
    """Train and evaluate a two-layer dense net with *activation* in the hidden layer.

    Uses static gradient descent and Xavier-initialized weights; the output
    layer is always Softmax. Results are plotted under
    ./results/activations/<name> and the model is scored on the test set.
    """
    network = NeuralNetwork(
        optimizer=StaticGradientDescent(
            learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=[
            Dense(layer_size=50, activation_func=activation,
                  weight_initializer=XavierInitializer()),
            Dense(layer_size=10, activation_func=Softmax(),
                  weight_initializer=XavierInitializer()),
        ],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f'./results/activations/{activation.get_name()}'),
        ])
    network.fit(x_train=X_train, y_train=y_train,
                x_val=X_val, y_val=y_val,
                epochs=default_parameters['epochs'],
                batch_size=default_parameters['batch_size'])
    network.test(X_test, y_test)
def test_single_layer(layer_config):
    """Train and evaluate a network built from a layer configuration dict.

    *layer_config* supplies 'layers' (the layer list) and 'name' (used for
    the plot output directory ./results/layers/<name>). Optimization uses
    static gradient descent with the module defaults; the model is finally
    scored on the test split.
    """
    network = NeuralNetwork(
        optimizer=StaticGradientDescent(
            learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=layer_config['layers'],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f"./results/layers/{layer_config['name']}"),
        ])
    network.fit(x_train=X_train, y_train=y_train,
                x_val=X_val, y_val=y_val,
                epochs=default_parameters['epochs'],
                batch_size=default_parameters['batch_size'])
    network.test(X_test, y_test)
def test_signle_convo_network(test):
    """Train and evaluate a convolutional network described by a test dict.

    *test* supplies 'layers' (the layer list) and 'test_name' (used for the
    plot output directory ./lab_4_fixed/<test_name>). Uses Adam with the
    module default learning rate, then scores the model on the test split.

    NOTE: the name contains a typo ("signle"); it is kept for backward
    compatibility — prefer the correctly spelled alias below.
    """
    model = NeuralNetwork(
        optimizer=AdamOptimizer(
            learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=test['layers'],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f"./lab_4_fixed/{test['test_name']}"),
        ])
    model.fit(x_train=X_train, y_train=y_train,
              x_val=X_val, y_val=y_val,
              epochs=default_parameters['epochs'],
              batch_size=default_parameters['batch_size'])
    model.test(X_test, y_test)


# Correctly spelled, backward-compatible alias for the misspelled original.
test_single_convo_network = test_signle_convo_network
from activations.sigmoid import Sigmoid
from activations.softmax import Softmax
from losses.crossentropy import CrossEntropy
from losses.mse import MeanSquaredError
from tests.cost_tests import perform_cost_and_last_layer_tests

# Every (loss, output-activation) pairing to compare: each entry is
# [loss_instance, last_layer_activation_instance].
cost_experiments = [
    [CrossEntropy(), Softmax()],
    [CrossEntropy(), Sigmoid()],
    [MeanSquaredError(), Softmax()],
    [MeanSquaredError(), Sigmoid()],
]

# Guarded so importing this module no longer triggers the full experiment
# run as a side effect; execute the file directly to run the tests.
if __name__ == '__main__':
    perform_cost_and_last_layer_tests(cost_experiments)