Example #1
def test_single_initializer_with_convo(initializer):
    model = NeuralNetwork(
        optimizer=AdamOptimizer(
            learning_rate=default_parameters['learning_rate'] * 10),
        loss=CrossEntropy(),
        layers=[
            Convolution2D(num_of_filters=8,
                          kernel=(3, 3),
                          activation_func=ReLu()),
            MaxPooling2D(pool_size=(2, 2), stride=(2, 2)),
            Flatten(),
            Dense(layer_size=50,
                  activation_func=ReLu(),
                  weight_initializer=initializer),
            Dense(layer_size=10,
                  activation_func=Softmax(),
                  weight_initializer=initializer)
        ],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f'./lab_3/initializers/{initializer.get_name()}')
        ])

    model.fit(x_train=X_train,
              y_train=y_train,
              x_val=X_val,
              y_val=y_val,
              epochs=default_parameters['epochs'],
              batch_size=default_parameters['batch_size'])

    model.test(X_test, y_test)
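A driver for this test would simply iterate over the initializers being compared. Below is a minimal sketch; the import path is an assumption, though XavierInitializer and HeInitializer both appear elsewhere in these examples.

# Minimal driver sketch. The module path `initializers` is an
# assumption; the Xavier and He initializers are used in the other
# examples on this page.
from initializers import XavierInitializer, HeInitializer

for initializer in [XavierInitializer(), HeInitializer()]:
    # Each run trains the convolutional model and writes plots under
    # ./lab_3/initializers/<name>.
    test_single_initializer_with_convo(initializer)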
Example #2
def test_single_cost_and_last_layer(cost_func, last_layer):
    model = NeuralNetwork(
        optimizer=StaticGradientDescent(default_parameters['learning_rate']),
        loss=cost_func,
        layers=[
            Dense(layer_size=50,
                  activation_func=ReLu(),
                  weight_initializer=XavierInitializer()),
            Dense(layer_size=10,
                  activation_func=last_layer,
                  weight_initializer=XavierInitializer())
        ],
        callbacks=[
            LoggerCallback(),
            PlotCallback(
                f'./lab_3/cost/func={cost_func.get_name()}&last_layer={last_layer.get_name()}'
            )
        ])

    model.fit(x_train=X_train,
              y_train=y_train,
              x_val=X_val,
              y_val=y_val,
              epochs=default_parameters['epochs'],
              batch_size=default_parameters['batch_size'])

    model.test(X_test, y_test)
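Because the loss and the output activation are chosen together (cross-entropy pairs with softmax), the natural driver iterates over (cost, last layer) pairs. A sketch with hypothetical imports; the commented second pairing is illustrative and may not exist in this codebase:

# Hypothetical module paths; only CrossEntropy and Softmax are
# confirmed by the examples on this page.
from losses import CrossEntropy
from activations import Softmax

pairs = [
    (CrossEntropy(), Softmax()),
    # (MeanSquaredError(), Sigmoid()),  # illustrative, if defined
]

for cost_func, last_layer in pairs:
    test_single_cost_and_last_layer(cost_func, last_layer)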
Example #3
def test_single_initializer(initializer):
    model = NeuralNetwork(
        optimizer=AdamOptimizer(
            learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=[
            Flatten(),
            Dense(layer_size=50,
                  activation_func=ReLu(),
                  weight_initializer=initializer),
            Dense(layer_size=10,
                  activation_func=Softmax(),
                  weight_initializer=initializer)
        ],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f'./lab_3/initializers/{initializer.get_name()}')
        ])

    model.fit(x_train=X_train,
              y_train=y_train,
              x_val=X_val,
              y_val=y_val,
              epochs=default_parameters['epochs'],
              batch_size=default_parameters['batch_size'])

    model.test(X_test, y_test)
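If these tests run under pytest, the comparison can instead be parametrized so each initializer reports as its own test case. A sketch, assuming pytest is available and that get_name() returns a short label as used in the plot paths:

import pytest

# ids reuse get_name() so the test report matches the plot
# directories created by PlotCallback.
@pytest.mark.parametrize('initializer',
                         [XavierInitializer(), HeInitializer()],
                         ids=lambda i: i.get_name())
def test_initializer_variants(initializer):
    test_single_initializer(initializer)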
Example #4
def __init__(self, num_of_filters, kernel, stride=(1, 1),
             kernel_initializer=XavierInitializer(),
             bias_initializer=ZeroInitializer(),
             activation_func=ReLu(), layer_name='Convo2D'):
    super().__init__(layer_name)
    # Convolution hyperparameters.
    self._num_of_filters = num_of_filters
    self._kernel = kernel
    self._stride = stride
    # Initialization strategies for the kernels and biases.
    self._kernel_initializer = kernel_initializer
    self._bias_initializer = bias_initializer
    self._activation_func = activation_func
    # Pre-activation output, filled in during the forward pass.
    self._z = None
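Only num_of_filters and kernel are required; everything else defaults to the values above. A minimal instantiation, plus an explicit override:

# Matches the defaults: stride (1, 1), Xavier-initialized kernels,
# zero-initialized biases, ReLU activation, name 'Convo2D'.
conv = Convolution2D(num_of_filters=8, kernel=(3, 3))

# Any default can be overridden per layer.
conv_strided = Convolution2D(num_of_filters=8, kernel=(3, 3),
                             stride=(2, 2), layer_name='Convo2D_s2')

Note that the initializer defaults are created once, when the def statement is evaluated, so every instance built with the defaults shares the same XavierInitializer and ZeroInitializer objects; that is harmless for stateless initializers but worth knowing.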
Example #5
def test_single_weight_initializer(weight_initializer):
    model = NeuralNetwork(
        optimizer=StaticGradientDescent(learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=[
            Dense(layer_size=50, activation_func=ReLu(), weight_initializer=weight_initializer),
            Dense(layer_size=10, activation_func=Softmax(), weight_initializer=weight_initializer)
        ],
        callbacks=[
            LoggerCallback(),
            PlotCallback(f'./results/weight_initializer/{weight_initializer.get_name()}')
        ]
    )

    model.fit(x_train=X_train, y_train=y_train, x_val=X_val, y_val=y_val,
              epochs=default_parameters['epochs'], batch_size=default_parameters['batch_size'])

    model.test(X_test, y_test)
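All of these tests read from a shared default_parameters dict that is defined elsewhere in the repository. A plausible shape, with values that are purely illustrative assumptions:

# Illustrative values only; the repository defines the real dict.
default_parameters = {
    'learning_rate': 0.01,
    'epochs': 10,
    'batch_size': 32,
}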
Example #6
def single_batch_size_test(batch_size):
    model = NeuralNetwork(
        optimizer=StaticGradientDescent(learning_rate=default_parameters['learning_rate']),
        loss=CrossEntropy(),
        layers=[
            Dense(layer_size=50, activation_func=ReLu(), weight_initializer=XavierInitializer()),
            Dense(layer_size=10, activation_func=Softmax(), weight_initializer=XavierInitializer())
        ],
        callbacks=[
            LoggerCallback(),
            PlotCallback('./results/batch_size/')
        ]
    )

    model.fit(x_train=X_train, y_train=y_train, x_val=X_val, y_val=y_val,
              epochs=default_parameters['epochs'], batch_size=batch_size)

    model.test(X_test, y_test)
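A sweep driver completes the picture; powers of two are the conventional grid. The exact values below are an assumption:

# Illustrative batch-size sweep; all runs plot into
# ./results/batch_size/.
for batch_size in [1, 16, 32, 64, 128]:
    single_batch_size_test(batch_size)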
Example #7
#     'test_name': 'normal_C3x3-F1_MP2x2_F_D50_D10',
#     'layers': [
#         Convolution2D(num_of_filters=1, kernel=(3, 3), activation_func=ReLu()),
#         MaxPooling2D(pool_size=(2, 2), stride=(2, 2)),
#         Flatten(),
#         Dense(layer_size=50, activation_func=ReLu(), weight_initializer=HeInitializer()),
#         Dense(layer_size=10, activation_func=Softmax(), weight_initializer=HeInitializer())
#     ]
# },
{
    'test_name': 'normal_C3x3-F2_MP2x2_F_D50_D10',
    'layers': [
        Convolution2D(num_of_filters=2,
                      kernel=(3, 3),
                      activation_func=ReLu()),
        MaxPooling2D(pool_size=(2, 2), stride=(2, 2)),
        Flatten(),
        Dense(layer_size=50,
              activation_func=ReLu(),
              weight_initializer=HeInitializer()),
        Dense(layer_size=10,
              activation_func=Softmax(),
              weight_initializer=HeInitializer())
    ]
},
{
    'test_name': 'normal_C3x3-F4_MP2x2_F_D50_D10',
    'layers': [
        Convolution2D(num_of_filters=4,