def test_get_deltas(self):
    """Check FullConnLayer._get_deltas against precomputed reference values.

    Exercises four delta pathways of a Sigmoid layer after one forward pass:
    (1) deltas propagated from the layer above only,
    (2) deltas derived from a target/cost (SquaredError) only,
    (3) propagated deltas plus a weighted regularization cost term,
    (4) propagated deltas combined with a target cost.
    Reference values are hard-coded; comparison uses ``self.epsilon``.
    """
    sys.stdout.write('FNN_layer -> Performing get_deltas test ... ')
    sys.stdout.flush()
    # Deterministic layer: weights 0.01 * [0..35] reshaped to (9, 4).
    layer = FullConnLayer(input_dim=9,
                          output_dim=4,
                          activation_function=AFct.Sigmoid,
                          initial_weights=0.01 * numx.arange(9 * 4).reshape(9, 4),
                          initial_bias=0.0,
                          initial_offset=0.5,
                          connections=None)
    # Forward pass is required so the layer caches activations for deltas.
    layer.forward_propagate(1.0 * numx.arange(9).reshape(1, 9))

    # Case 1: deltas coming only from the layer above (no cost terms).
    d = layer._get_deltas(1.0 * numx.arange(4).reshape(1, 4), None, None, 0.0,
                          0.0, None, 0.0)
    targetd = numx.array([[0., 0.00042823, 0.00062518, 0.00068448]])
    assert numx.all(numx.abs(d - targetd) < self.epsilon)

    # Case 2: deltas only from a target via SquaredError cost (weight 1.0).
    d = layer._get_deltas(None, 1.0 * numx.arange(4).reshape(1, 4),
                          CFct.SquaredError, 1.0, 0.0, None, 0.0)
    targetd = numx.array([[
        5.86251700e-04, -1.83457004e-07, -3.12685448e-04, -4.56375237e-04
    ]])
    assert numx.all(numx.abs(d - targetd) < self.epsilon)

    # Case 3: propagated deltas plus a regularization cost contribution
    # (sparseness-style term, weight 0.01, SquaredError with desired value 1.0).
    d = layer._get_deltas(1.0 * numx.arange(4).reshape(1, 4), None, None, 0.0,
                          0.01, CFct.SquaredError, 1.0)
    targetd = numx.array([[0.00058039, 0.00085199, 0.00093454, 0.00091031]])
    assert numx.all(numx.abs(d - targetd) < self.epsilon)

    # Case 4: propagated deltas combined with a target cost term.
    d = layer._get_deltas(1.0 * numx.arange(4).reshape(1, 4),
                          1.0 * numx.arange(4).reshape(1, 4),
                          CFct.SquaredError, 1.0, 0.0, None, 0.0)
    targetd = numx.array([[0.00058625, 0.00042804, 0.00031249, 0.00022811]])
    assert numx.all(numx.abs(d - targetd) < self.epsilon)
    print('successfully passed!')
    sys.stdout.flush()
def test_forward_propagate(self):
    """Check FullConnLayer.forward_propagate against precomputed outputs.

    Runs the same deterministic input through an Identity-activation layer
    and a SoftMax-activation layer and compares to hard-coded reference
    activations using ``self.epsilon``.
    """
    sys.stdout.write('FNN_layer -> Performing forward_propagate test ... ')
    sys.stdout.flush()
    # Identity activation: output is the affine transform of the input.
    layer = FullConnLayer(input_dim=9,
                          output_dim=4,
                          activation_function=AFct.Identity,
                          initial_weights=0.001 * numx.arange(9 * 4).reshape(9, 4),
                          initial_bias=0.0,
                          initial_offset=0.5,
                          connections=None)
    res = layer.forward_propagate(numx.arange(9).reshape(1, 9))
    target = numx.array([[0.744, 0.7755, 0.807, 0.8385]])
    assert numx.all(numx.abs(res - target) < self.epsilon)

    # SoftMax activation: same affine transform, normalized to sum to 1.
    layer = FullConnLayer(input_dim=9,
                          output_dim=4,
                          activation_function=AFct.SoftMax,
                          initial_weights=0.001 * numx.arange(9 * 4).reshape(9, 4),
                          initial_bias=0.0,
                          initial_offset=0.5,
                          connections=None)
    res = layer.forward_propagate(numx.arange(9).reshape(1, 9))
    target = numx.array([[0.23831441, 0.2459408, 0.25381124, 0.26193355]])
    assert numx.all(numx.abs(res - target) < self.epsilon)
    print('successfully passed!')
    sys.stdout.flush()
def test_calculate_gradient(self):
    """Check FullConnLayer._calculate_gradient against reference gradients.

    For Identity and SoftMax activations: performs a forward pass, computes
    deltas from the layer above, back-propagates, then compares the weight
    and bias gradients to hard-coded reference matrices via ``self.epsilon``.
    """
    sys.stdout.write(
        'FNN_layer -> Performing calculate_gradient test ... ')
    sys.stdout.flush()
    # Identity activation case.
    layer = FullConnLayer(input_dim=9,
                          output_dim=4,
                          activation_function=AFct.Identity,
                          initial_weights=0.001 * numx.arange(9 * 4).reshape(9, 4),
                          initial_bias=0.0,
                          initial_offset=0.5,
                          connections=None)
    # Full forward/delta/backward cycle is required before the gradient
    # can be computed from the cached activations and deltas.
    layer.forward_propagate(numx.arange(9).reshape(1, 9))
    layer._get_deltas(
        numx.arange(4).reshape(1, 4), None, None, 0.0, 0.0, None, 0.0)
    layer._backward_propagate()
    dw, db = layer._calculate_gradient()
    targetW = numx.array([[0., -0.5, -1., -1.5],
                          [0., 0.5, 1., 1.5],
                          [0., 1.5, 3., 4.5],
                          [0., 2.5, 5., 7.5],
                          [0., 3.5, 7., 10.5],
                          [0., 4.5, 9., 13.5],
                          [0., 5.5, 11., 16.5],
                          [0., 6.5, 13., 19.5],
                          [0., 7.5, 15., 22.5]])
    assert numx.all(numx.abs(dw - targetW) < self.epsilon)
    targetb = numx.array([0., 1., 2., 3.])
    assert numx.all(numx.abs(db - targetb) < self.epsilon)

    # SoftMax activation case.
    layer = FullConnLayer(input_dim=9,
                          output_dim=4,
                          activation_function=AFct.SoftMax,
                          initial_weights=0.001 * numx.arange(9 * 4).reshape(9, 4),
                          initial_bias=0.0,
                          initial_offset=0.5,
                          connections=None)
    layer.forward_propagate(numx.arange(9).reshape(1, 9))
    layer._get_deltas(
        numx.arange(4).reshape(1, 4), None, None, 0.0, 0.0, None, 0.0)
    layer._backward_propagate()
    dw, db = layer._calculate_gradient()
    targetW = numx.array(
        [[0.1834263, 0.0663258, -0.05845731, -0.1912948],
         [-0.1834263, -0.0663258, 0.05845731, 0.1912948],
         [-0.55027891, -0.19897739, 0.17537192, 0.57388439],
         [-0.91713152, -0.33162899, 0.29228653, 0.95647398],
         [-1.28398412, -0.46428059, 0.40920114, 1.33906357],
         [-1.65083673, -0.59693218, 0.52611575, 1.72165316],
         [-2.01768934, -0.72958378, 0.64303037, 2.10424275],
         [-2.38454194, -0.86223538, 0.75994498, 2.48683234],
         [-2.75139455, -0.99488697, 0.87685959, 2.86942193]])
    targetb = numx.array([-0.36685261, -0.1326516, 0.11691461, 0.38258959])
    assert numx.all(numx.abs(dw - targetW) < self.epsilon)
    assert numx.all(numx.abs(db - targetb) < self.epsilon)
    print('successfully passed!')
    sys.stdout.flush()
def test_backward_propagate(self):
    """Check FullConnLayer._backward_propagate against reference values.

    For Identity and SoftMax activations: performs a forward pass, sets
    deltas from the layer above, then compares the back-propagated signal
    (deltas mapped back to the input dimension) to hard-coded references
    via ``self.epsilon``.
    """
    sys.stdout.write(
        'FNN_layer -> Performing backward_propagate test ... ')
    sys.stdout.flush()
    # Identity activation case.
    layer = FullConnLayer(input_dim=9,
                          output_dim=4,
                          activation_function=AFct.Identity,
                          initial_weights=0.001 * numx.arange(9 * 4).reshape(9, 4),
                          initial_bias=0.0,
                          initial_offset=0.5,
                          connections=None)
    # Forward pass and delta computation must precede back-propagation.
    layer.forward_propagate(numx.arange(9).reshape(1, 9))
    layer._get_deltas(
        numx.arange(4).reshape(1, 4), None, None, 0.0, 0.0, None, 0.0)
    res = layer._backward_propagate()
    target = numx.array(
        [[0.014, 0.038, 0.062, 0.086, 0.11, 0.134, 0.158, 0.182, 0.206]])
    assert numx.all(numx.abs(res - target) < self.epsilon)

    # SoftMax activation case.
    layer = FullConnLayer(input_dim=9,
                          output_dim=4,
                          activation_function=AFct.SoftMax,
                          initial_weights=0.001 * numx.arange(9 * 4).reshape(9, 4),
                          initial_bias=0.0,
                          initial_offset=0.5,
                          connections=None)
    layer.forward_propagate(numx.arange(9).reshape(1, 9))
    layer._get_deltas(
        numx.arange(4).reshape(1, 4), None, None, 0.0, 0.0, None, 0.0)
    res = layer._backward_propagate()
    target = numx.array([[
        0.00124895, 0.00124895, 0.00124895, 0.00124895, 0.00124895,
        0.00124895, 0.00124895, 0.00124895, 0.00124895
    ]])
    assert numx.all(numx.abs(res - target) < self.epsilon)
    print('successfully passed!')
    sys.stdout.flush()