def test_single_initializer(initializer):
    """Train and evaluate a dense-only network whose Dense layers both use *initializer*.

    Reads the module-level data splits (X_train/y_train, ...) and
    ``default_parameters``; writes training plots under ./lab_3/initializers/.
    """
    hidden = Dense(layer_size=50,
                   activation_func=ReLu(),
                   weight_initializer=initializer)
    output = Dense(layer_size=10,
                   activation_func=Softmax(),
                   weight_initializer=initializer)
    net = NeuralNetwork(
        optimizer=AdamOptimizer(
            learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=[Flatten(), hidden, output],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f'./lab_3/initializers/{initializer.get_name()}'),
        ])
    net.fit(x_train=X_train,
            y_train=y_train,
            x_val=X_val,
            y_val=y_val,
            epochs=default_parameters['epochs'],
            batch_size=default_parameters['batch_size'])
    net.test(X_test, y_test)
def test_single_cost_and_last_layer(cost_func, last_layer):
    """Train and evaluate a two-layer dense network for one (loss, output-activation) pair.

    *cost_func* is the loss instance; *last_layer* is the output activation.
    Plots are written under ./lab_3/cost/ keyed by both names.
    """
    layer_stack = [
        Dense(layer_size=50,
              activation_func=ReLu(),
              weight_initializer=XavierInitializer()),
        Dense(layer_size=10,
              activation_func=last_layer,
              weight_initializer=XavierInitializer()),
    ]
    plot_dir = f'./lab_3/cost/func={cost_func.get_name()}&last_layer={last_layer.get_name()}'
    net = NeuralNetwork(
        optimizer=StaticGradientDescent(default_parameters['learning_rate']),
        loss=cost_func,
        layers=layer_stack,
        callbacks=[LoggerCallback(), PlotCallback(plot_dir)])
    net.fit(x_train=X_train,
            y_train=y_train,
            x_val=X_val,
            y_val=y_val,
            epochs=default_parameters['epochs'],
            batch_size=default_parameters['batch_size'])
    net.test(X_test, y_test)
def test_single_initializer_with_convo(initializer):
    """Train and evaluate a small conv net whose Dense layers use *initializer*.

    Uses a learning rate 10x the default (the only place in this file that
    scales it), with one Conv2D + MaxPooling front end before the dense head.
    """
    # NOTE(review): plots are written to the same directory as
    # test_single_initializer — runs of the two experiments may overwrite
    # each other's output; confirm this is intended.
    feature_extractor = [
        Convolution2D(num_of_filters=8, kernel=(3, 3), activation_func=ReLu()),
        MaxPooling2D(pool_size=(2, 2), stride=(2, 2)),
        Flatten(),
    ]
    dense_head = [
        Dense(layer_size=50,
              activation_func=ReLu(),
              weight_initializer=initializer),
        Dense(layer_size=10,
              activation_func=Softmax(),
              weight_initializer=initializer),
    ]
    net = NeuralNetwork(
        optimizer=AdamOptimizer(
            learning_rate=default_parameters['learning_rate'] * 10),
        loss=CrossEntropy(),
        layers=feature_extractor + dense_head,
        callbacks=[
            LoggerCallback(),
            PlotCallback(f'./lab_3/initializers/{initializer.get_name()}'),
        ])
    net.fit(x_train=X_train,
            y_train=y_train,
            x_val=X_val,
            y_val=y_val,
            epochs=default_parameters['epochs'],
            batch_size=default_parameters['batch_size'])
    net.test(X_test, y_test)
def test_single_activation_function(activation):
    """Train and evaluate a two-layer dense network using *activation* in the hidden layer.

    The output layer is fixed to Softmax; plots go under ./results/activations/.
    """
    net = NeuralNetwork(
        optimizer=StaticGradientDescent(
            learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=[
            Dense(layer_size=50,
                  activation_func=activation,
                  weight_initializer=XavierInitializer()),
            Dense(layer_size=10,
                  activation_func=Softmax(),
                  weight_initializer=XavierInitializer()),
        ],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f'./results/activations/{activation.get_name()}'),
        ])
    net.fit(x_train=X_train,
            y_train=y_train,
            x_val=X_val,
            y_val=y_val,
            epochs=default_parameters['epochs'],
            batch_size=default_parameters['batch_size'])
    net.test(X_test, y_test)
def test_single_layer(layer_config):
    """Train and evaluate a network built from a prepared layer configuration.

    *layer_config* is a mapping with keys 'layers' (the layer list) and
    'name' (used for the plot output directory under ./results/layers/).
    """
    callbacks = [
        LoggerCallback(),
        PlotCallback(f"./results/layers/{layer_config['name']}"),
    ]
    net = NeuralNetwork(
        optimizer=StaticGradientDescent(
            learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=layer_config['layers'],
        callbacks=callbacks)
    net.fit(x_train=X_train,
            y_train=y_train,
            x_val=X_val,
            y_val=y_val,
            epochs=default_parameters['epochs'],
            batch_size=default_parameters['batch_size'])
    net.test(X_test, y_test)
def test_signle_convo_network(test):
    """Train and evaluate one convolutional-network configuration.

    *test* is a mapping with keys 'layers' (the layer list) and 'test_name'
    (used for the plot output directory under ./lab_4_fixed/).
    """
    # NOTE(review): 'signle' is a typo for 'single'; the name is kept
    # unchanged because callers elsewhere may reference it.
    callbacks = [
        LoggerCallback(),
        PlotCallback(f"./lab_4_fixed/{test['test_name']}"),
    ]
    net = NeuralNetwork(
        optimizer=AdamOptimizer(
            learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=test['layers'],
        callbacks=callbacks)
    net.fit(x_train=X_train,
            y_train=y_train,
            x_val=X_val,
            y_val=y_val,
            epochs=default_parameters['epochs'],
            batch_size=default_parameters['batch_size'])
    net.test(X_test, y_test)
from models.model import NeuralNetwork
from optimizers.gradient_descent_static import StaticGradientDescent
from preprocessing.data_loader import get_data

# Script: build a two-layer dense network (SGD + MSE), train it on the
# loaded splits, and evaluate it on the held-out test set.
#
# NOTE(review): MeanSquaredError, Dense, ReLu, Softmax, XavierInitializer,
# SaveBestCallback, LoggerCallback and PlotCallback are used below but are
# not imported in this chunk — presumably imported elsewhere in the file,
# or missing; verify before running.
model = NeuralNetwork(optimizer=StaticGradientDescent(learning_rate=0.01),
                      loss=MeanSquaredError(),
                      layers=[
                          Dense(layer_size=50,
                                activation_func=ReLu(),
                                weight_initializer=XavierInitializer()),
                          Dense(layer_size=10,
                                activation_func=Softmax(),
                                weight_initializer=XavierInitializer())
                      ],
                      callbacks=[
                          # Checkpoints the best model into the dated results directory.
                          SaveBestCallback('./results/01_10_2019_13:00',
                                           'best_model.pkl'),
                          LoggerCallback(),
                          PlotCallback('./results/batch_size/', 'test.pkl')
                      ])
# get_data() returns the (train, validation, test) splits as (X, y) pairs.
(X_train, y_train), (X_val, y_val), (X_test, y_test) = get_data()
model.fit(x_train=X_train,
          y_train=y_train,
          x_val=X_val,
          y_val=y_val,
          epochs=10,
          batch_size=32)
model.test(X_test, y_test)