def test_optimizer(optimizer1):
    """One optimization step should update the dense layer's parameters to the known expected values."""
    input_layer = InputLayer(2)
    dense = DenseLayer(2, 3, 'relu')
    dense.W = W1
    dense.b = b1
    net_layers = [input_layer, dense]

    # Gradients for the single trainable layer, as produced by backprop elsewhere.
    grads = [{'W': dJdW1, 'b': dJdb1}]

    optimizer1.reset(net_layers)
    optimizer1.optimize(net_layers, grads)

    # Compare against precomputed post-step parameters with 4-decimal tolerance.
    assert_array_almost_equal(dense.params['W'], optimized_W1, decimal=4)
    assert_array_almost_equal(dense.params['b'], optimized_b1, decimal=4)
# Beispiel #2
# 0
 def add_dense_layer(self, dimension: int, activation_function_name: str):
     """
     Append a new dense layer after the current last layer of this DockNet
     :param dimension: number of neurons in the new layer
     :param activation_function_name: name of the activation function for the new layer
     """
     # The new layer's input size is the output dimension of the current last layer.
     previous_dimension = self.layers[-1].dimension
     self.layers.append(
         DenseLayer(previous_dimension, dimension, activation_function_name))
# Beispiel #3
# 0
def test_initialize(initializer1):
    """With a fixed RNG seed, initialization should yield known weights/biases and leave the input layer empty."""
    np.random.seed(1)
    layers = [InputLayer(2), DenseLayer(2, 3, 'relu')]

    initializer1.initialize(layers)

    input_layer, dense = layers
    # The input layer holds no trainable parameters.
    assert input_layer.params == {}
    assert_array_almost_equal(
        dense.params['W'],
        [[0.08121727, -0.03058782], [-0.02640859, -0.05364843], [0.04327038, -0.11507693]])
    assert_array_almost_equal(
        dense.params['b'],
        [[0.08724059], [-0.03806035], [0.01595195]])
# Beispiel #4
# 0
def layer1() -> DenseLayer:
    """Fixture: a 2-input, 3-neuron 'relu' dense layer with preset weights W1 and biases b1."""
    layer = DenseLayer(2, 3, 'relu')
    layer.W = W1
    layer.b = b1
    yield layer