def test_relu_layer_NUMERICAL_GRADIENT_CHECK(self):
    x = np.linspace(-1, 1, 10 * 32).reshape([10, 32])
    layer = ReLU()
    # Analytic gradient of mean(ReLU(x)) with respect to x.
    grads = layer.backward(x, np.ones([10, 32]) / (32 * 10))
    # Finite-difference estimate of the same gradient.
    numeric_grads = eval_numerical_gradient(
        lambda x: layer.forward(x).mean(), x=x)

    self.assertTrue(
        np.allclose(grads, numeric_grads, rtol=1e-5, atol=0),
        msg="the gradient returned by your layer does not match "
            "the numerically computed gradient")
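
The test above relies on a helper eval_numerical_gradient defined elsewhere in the exercise. A minimal sketch using central differences, with the signature inferred from the call above (the exact helper may differ):

import numpy as np

def eval_numerical_gradient(f, x, h=1e-5):
    """Estimate df/dx at x by central differences; f must return a scalar."""
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        ix = it.multi_index
        old = x[ix]
        x[ix] = old + h
        fxph = f(x)              # f(x + h)
        x[ix] = old - h
        fxmh = f(x)              # f(x - h)
        x[ix] = old              # restore the original entry
        grad[ix] = (fxph - fxmh) / (2 * h)
        it.iternext()
    return grad

Each entry of x is perturbed in both directions, so the estimate is accurate to O(h^2), which is what makes the tight rtol=1e-5 tolerance in the test feasible.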
Example #2
import os
import sys
import numpy as np
from pathlib import Path

try:
    # Make the sibling 'lib' directory importable.
    sys.path.append(os.path.join(Path(os.getcwd()).parent, 'lib'))
    from layers import ReLU
except ImportError:
    print('Library module not found')

# Test 1: vector input
layer = ReLU()

x = np.array([0.1, -0.2, 0.3, -0.4, 0.5])
print(x)

y = layer.forward(x)
print(y)            # negative entries clamped to zero
print(layer.mask)   # True where the input was <= 0

dout = np.array([-0.1, -0.2, -0.3, 0.4, -0.5])
dx = layer.backward(dout)
print(dx)           # gradient zeroed where the mask is True

print('================================')

# Test 2: matrix input
x = np.array([[0.1, -0.5, 1.0],
              [0.2, -0.6, 2.0],
              [0.3, -0.7, 3.0],
              [0.4, -0.8, 4.0]])

y = layer.forward(x)
print(y)
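
The layers module imported above is not shown. A minimal ReLU consistent with the calls in this example (forward, backward, and the mask attribute), written in the common mask-caching style, might look like this; it is a sketch, not the actual library code:

import numpy as np

class ReLU:
    def __init__(self):
        self.mask = None

    def forward(self, x):
        # Cache which inputs are non-positive; those outputs are clamped to zero.
        self.mask = (x <= 0)
        out = x.copy()
        out[self.mask] = 0
        return out

    def backward(self, dout):
        # The gradient flows through only where the input was positive.
        dx = dout.copy()
        dx[self.mask] = 0
        return dx

Note that forward overwrites self.mask on every call, which is why the same layer instance can be reused for the matrix test after the vector test.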