def test_update_parameters(self):
    # A gradient-descent step should move W and b against the cached gradients.
    layer = Dense(10, sigmoid)
    layer.build(20)
    learning_rate = 0.1
    dW = np.random.randn(10, 20)
    db = np.random.randn(10, 1)
    layer.update_gradients(dW, db)
    new_W = layer.W - learning_rate * dW
    new_b = layer.b - learning_rate * db
    layer.update_parameters(learning_rate)
    np.testing.assert_allclose(layer.W, new_W)
    np.testing.assert_allclose(layer.b, new_b)

def test_layer_output_shape_after_forward_activation(self):
    layer = Dense(10, sigmoid)
    layer.build(20)
    layer.forward_activation(np.random.randn(20, 35))
    self.assertEqual(layer.A.shape, (10, 35))

def test_b_shape_after_build(self):
    layer = Dense(10, sigmoid)
    layer.build(20)
    self.assertEqual(layer.b.shape, (10, 1))
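
# Reference sketch (an assumption, not the project's actual source) of the Dense
# API these tests exercise: build(input_dim) creates W of shape (units, input_dim)
# and b of shape (units, 1); forward_activation stores A; update_gradients caches
# dW/db; update_parameters applies a plain gradient-descent step. Only the
# attribute and method names come from the tests above; the class name
# _ReferenceDense and the initialisation scale are hypothetical.
class _ReferenceDense:
    def __init__(self, units, activation):
        self.units = units
        self.activation = activation

    def build(self, input_dim):
        # Small random weights, zero biases; the real init scheme may differ.
        self.W = np.random.randn(self.units, input_dim) * 0.01
        self.b = np.zeros((self.units, 1))

    def forward_activation(self, X):
        # X has shape (input_dim, batch_size), so A has shape (units, batch_size).
        self.Z = self.W @ X + self.b
        self.A = self.activation(self.Z)
        return self.A

    def update_gradients(self, dW, db):
        # Cache the gradients computed during backpropagation.
        self.dW = dW
        self.db = db

    def update_parameters(self, learning_rate):
        # Vanilla gradient-descent update on both parameters.
        self.W -= learning_rate * self.dW
        self.b -= learning_rate * self.db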