def test_back_prop_sigmoid(self):
    layer = ActivationLayer("sigmoid")
    layer.set_input_shape((4,))
    # sigmoid'(0) = 0.25; inputs of very large magnitude saturate the
    # sigmoid, so their derivative is numerically 0.
    x = np.array([0, 2365836, 0, -154366], dtype=np.float64)
    layer.forward_prop(x)
    expected_in_grad = np.array([0.25, 0, 0.25, 0], dtype=np.float64)
    out_grad = np.ones(4)
    in_grad = layer.back_prop(out_grad)
    np.testing.assert_array_almost_equal(in_grad, expected_in_grad)
def test_back_prop_leaky_relu(self):
    layer = ActivationLayer("leakyReLU")
    layer.set_input_shape((4,))
    x = np.array([1, -1, 1, -1], dtype=np.float64)
    layer.forward_prop(x)
    out_grad = np.ones(4)
    # leakyReLU'(x) is 1 for x > 0 and 0.01 (the leak slope expected here)
    # for x < 0.
    expected_in_grad = np.array([1, 0.01, 1, 0.01], dtype=np.float64)
    in_grad = layer.back_prop(out_grad)
    np.testing.assert_array_almost_equal(in_grad, expected_in_grad)
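# A minimal sketch, not part of the original suite: cross-check the analytic
# sigmoid gradient against a central-difference estimate. This assumes, as the
# tests above do, that ActivationLayer("sigmoid") applies
# sigma(x) = 1 / (1 + exp(-x)) elementwise and that back_prop returns
# out_grad * sigma'(x); the test name and input values are illustrative.
def test_back_prop_sigmoid_matches_finite_difference(self):
    layer = ActivationLayer("sigmoid")
    layer.set_input_shape((4,))
    x = np.array([0.5, -0.5, 1.5, -1.5], dtype=np.float64)

    # Central-difference estimate of d sigmoid / dx at each component.
    eps = 1e-6
    sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))
    numeric_grad = (sigmoid(x + eps) - sigmoid(x - eps)) / (2 * eps)

    layer.forward_prop(x)
    analytic_grad = layer.back_prop(np.ones(4))
    np.testing.assert_array_almost_equal(analytic_grad, numeric_grad)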