Example #1
 def test_relu_2_backward(self):
     print('\n==================================')
     print('          Test ReLU backward      ')
     print('==================================')
     np.random.seed(123)
     relu = ReLU()
     x = np.random.randn(7, 7)
     d_prev = np.random.randn(*x.shape)
     out = relu.forward(x)
     dx = relu.backward(d_prev, 0.0)
     correct_dx = [[0., -1.29408532, -1.03878821, 0., 0., 0.02968323, 0.],
                   [0., 1.75488618, 0., 0., 0., 0.79486267, 0.],
                   [0., 0., 0.80723653, 0.04549008, -0.23309206, -1.19830114, 0.19952407],
                   [0.46843912, 0., 1.16220405, 0., 0., 1.03972709, 0.],
                   [0., 0., 0., 0., 0., 0., 0.80730819],
                   [0., -1.0859024, -0.73246199, 0., 2.08711336, 0., 0.],
                   [0., 0.18103513, 1.17786194, 0., 1.03111446, -1.08456791, -1.36347154]]
     e = rel_error(correct_dx, dx)
     print('dX relative difference:', e)
     self.assertTrue(e <= 5e-08)
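
The helper rel_error used by this assertion isn't defined in the snippet; a common definition for this kind of gradient check (maximum elementwise relative difference, as used in CS231n-style assignments) would be:

import numpy as np

def rel_error(x, y):
    # Max relative difference, guarded against division by zero.
    x, y = np.asarray(x), np.asarray(y)
    return np.max(np.abs(x - y) / np.maximum(1e-8, np.abs(x) + np.abs(y)))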
Example #2
import numpy as np

print(
    '================================ 2. ReLU =================================='
)
"""
* Correct ReLU *

Forward: 
 [1. 0. 3.]
Backward: 
 [-10   0 -30]
"""
relu = ReLU()
temp2 = np.array([1, -0.1, 3], dtype=np.float32)
temp3 = np.array([-10, -20, -30], dtype=np.float32)
print('ReLU Check')
print('Forward: \n', relu.forward(temp2))
print('Backward: \n', relu.backward(temp3))
print()
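
Neither example shows the ReLU class itself; a minimal sketch consistent with both call sites above (the second backward argument is assumed to be a regularization term, which ReLU has no use for) would be:

import numpy as np

class ReLU:
    def forward(self, x):
        # Cache where inputs were positive; zero out the rest.
        self.mask = x > 0
        return np.maximum(x, 0)

    def backward(self, d_prev, reg=0.0):
        # Gradient flows only where the forward input was positive;
        # reg is accepted for interface compatibility but unused here.
        return np.where(self.mask, d_prev, 0.0)

With this sketch, forward([1, -0.1, 3]) gives [1, 0, 3] and backward([-10, -20, -30]) gives [-10, 0, -30], matching the expected output quoted above.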

# ===========================================================================

print(
    '=========================== 3. Convolution Layer =========================='
)
# Convolution with stride 1, no padding
in_channel = 1
out_channel = 3
kernel_size = 3
stride = 1
pad = 0

conv_layer = ConvolutionLayer(in_channel, out_channel, kernel_size, stride,
                              pad)
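
ConvolutionLayer's implementation isn't shown here; a naive forward pass under the usual assumptions (weights of shape (out_channel, in_channel, k, k), inputs of shape (N, C, H, W)) illustrates the arithmetic. With kernel_size 3, stride 1, and no padding, a 7x7 input shrinks to (7 - 3)//1 + 1 = 5 per side:

import numpy as np

def conv_forward_naive(x, w, b, stride=1, pad=0):
    # x: (N, C, H, W), w: (F, C, k, k), b: (F,) -- a sketch, not the
    # repository's actual ConvolutionLayer.
    N, C, H, W_in = x.shape
    F, _, k, _ = w.shape
    # Output spatial size: (dim + 2*pad - k) // stride + 1
    H_out = (H + 2 * pad - k) // stride + 1
    W_out = (W_in + 2 * pad - k) // stride + 1
    x_pad = np.pad(x, ((0, 0), (0, 0), (pad, pad), (pad, pad)))
    out = np.zeros((N, F, H_out, W_out))
    for n in range(N):
        for f in range(F):
            for i in range(H_out):
                for j in range(W_out):
                    h0, w0 = i * stride, j * stride
                    window = x_pad[n, :, h0:h0 + k, w0:w0 + k]
                    out[n, f, i, j] = np.sum(window * w[f]) + b[f]
    return out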