Example No. 1
0
    def test_calculate_gradient(self):
        """Verify _calculate_gradient() against precomputed weight and bias
        gradients for both the Identity and SoftMax activation functions.
        """
        sys.stdout.write(
            'FNN_layer -> Performing calculate_gradient test ... ')
        sys.stdout.flush()

        def run_one_pass(act_fct):
            # Build a deterministic 9->4 layer, do one forward/backward
            # pass with fixed integer inputs/deltas, and return (dW, db).
            layer = FullConnLayer(input_dim=9,
                                  output_dim=4,
                                  activation_function=act_fct,
                                  initial_weights=0.001 *
                                  numx.arange(9 * 4).reshape(9, 4),
                                  initial_bias=0.0,
                                  initial_offset=0.5,
                                  connections=None)
            layer.forward_propagate(numx.arange(9).reshape(1, 9))
            layer._get_deltas(
                numx.arange(4).reshape(1, 4), None, None, 0.0, 0.0, None, 0.0)
            layer._backward_propagate()
            return layer._calculate_gradient()

        # Case 1: Identity activation — gradients have a simple closed form.
        grad_w, grad_b = run_one_pass(AFct.Identity)
        expected_w = numx.array([[0., -0.5, -1., -1.5], [0., 0.5, 1., 1.5],
                                 [0., 1.5, 3., 4.5], [0., 2.5, 5., 7.5],
                                 [0., 3.5, 7., 10.5], [0., 4.5, 9., 13.5],
                                 [0., 5.5, 11., 16.5], [0., 6.5, 13., 19.5],
                                 [0., 7.5, 15., 22.5]])
        assert numx.all(numx.abs(grad_w - expected_w) < self.epsilon)
        expected_b = numx.array([0., 1., 2., 3.])
        assert numx.all(numx.abs(grad_b - expected_b) < self.epsilon)

        # Case 2: SoftMax activation — compare against precomputed values.
        grad_w, grad_b = run_one_pass(AFct.SoftMax)
        expected_w = numx.array(
            [[0.1834263, 0.0663258, -0.05845731, -0.1912948],
             [-0.1834263, -0.0663258, 0.05845731, 0.1912948],
             [-0.55027891, -0.19897739, 0.17537192, 0.57388439],
             [-0.91713152, -0.33162899, 0.29228653, 0.95647398],
             [-1.28398412, -0.46428059, 0.40920114, 1.33906357],
             [-1.65083673, -0.59693218, 0.52611575, 1.72165316],
             [-2.01768934, -0.72958378, 0.64303037, 2.10424275],
             [-2.38454194, -0.86223538, 0.75994498, 2.48683234],
             [-2.75139455, -0.99488697, 0.87685959, 2.86942193]])
        expected_b = numx.array([-0.36685261, -0.1326516, 0.11691461, 0.38258959])
        assert numx.all(numx.abs(grad_w - expected_w) < self.epsilon)
        assert numx.all(numx.abs(grad_b - expected_b) < self.epsilon)
        print('successfully passed!')
        sys.stdout.flush()
Example No. 2
0
 def test_backward_propagate(self):
     """Verify _backward_propagate() against precomputed back-propagated
     error signals for both the Identity and SoftMax activations.
     """
     sys.stdout.write(
         'FNN_layer -> Performing backward_propagate test ... ')
     sys.stdout.flush()

     def back_prop_result(act_fct):
         # Build a deterministic 9->4 layer, run one forward pass with
         # fixed deltas, and return the back-propagated error signal.
         layer = FullConnLayer(input_dim=9,
                               output_dim=4,
                               activation_function=act_fct,
                               initial_weights=0.001 *
                               numx.arange(9 * 4).reshape(9, 4),
                               initial_bias=0.0,
                               initial_offset=0.5,
                               connections=None)
         layer.forward_propagate(numx.arange(9).reshape(1, 9))
         layer._get_deltas(
             numx.arange(4).reshape(1, 4), None, None, 0.0, 0.0, None, 0.0)
         return layer._backward_propagate()

     # Identity activation: signal is a simple affine ramp.
     result = back_prop_result(AFct.Identity)
     expected = numx.array(
         [[0.014, 0.038, 0.062, 0.086, 0.11, 0.134, 0.158, 0.182, 0.206]])
     assert numx.all(numx.abs(result - expected) < self.epsilon)

     # SoftMax activation: signal is constant across inputs here.
     result = back_prop_result(AFct.SoftMax)
     expected = numx.array([[
         0.00124895, 0.00124895, 0.00124895, 0.00124895, 0.00124895,
         0.00124895, 0.00124895, 0.00124895, 0.00124895
     ]])
     assert numx.all(numx.abs(result - expected) < self.epsilon)
     print('successfully passed!')
     sys.stdout.flush()