Example 1
    def test_ManyErrors(self):
        # two stacked Linear layers, 2 -> 3 -> 1, weights initialized to ones
        model = Seq(
            [Linear(2, 3, initialize='ones'),
             Linear(3, 1, initialize='ones')])
        x = np.random.rand(2)
        y = model.forward(x)
        # backpropagate a unit error from the single output
        model.backward(np.array([1.]))
Example 2
    def test_Reduce(self):
        # two stacked Linear layers reducing 3 -> 2 -> 2
        model = Seq(
            [Linear(3, 2, initialize='ones'),
             Linear(2, 2, initialize='ones')])
        x = np.random.rand(3)
        model.forward(x)
        # backpropagate a unit error for each of the two outputs
        model.backward(np.array([1., 1.]))
Example 3
    def test_update_weights_layer_vs_syntax(self):
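        # the same Linear(3, 3) weight matrix W is used to build a layer-style Seq
        # model and a syntax-style expression model; both should produce identical
        # outputs, gradients, and weight updates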
        x = np.array([1., 2., 3.])
        optimizer = SGD(0.1)

        W = np.random.rand(3, 3 + 1)

        linear_layer = layers.Linear(3, 3, initialize=W.copy())
        linear_layer_model = Seq(linear_layer, layers.Tanh)
        y = linear_layer_model.forward(x)
        back = linear_layer_model.backward(np.ones(3))

        var_x = Var('x')
        syntax_linear = Linear(3, 3, initialize=W.copy(), input=var_x)
        syntax_model = Tanh(syntax_linear)
        syntax_y = syntax_model.forward_variables({'x': x})
        syntax_back = syntax_model.backward_variables(np.ones(3))

        assert_array_equal(linear_layer.delta_W, syntax_linear.layer.delta_W)

        # update weights in both models
        linear_layer_model.update_weights(optimizer)
        syntax_model.update_weights(optimizer)

        assert_array_equal(y, syntax_y)
        assert_array_equal(back, syntax_back['x'])
        assert_array_equal(linear_layer.W, syntax_linear.layer.W)
Example 4
    def test_compare_with_Linear(self):
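        # a Linear layer should behave like a Wx product followed by a PlusBias layer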
        in_size = 2
        out_size = 3
        x = np.random.rand(in_size)
        optimizer = SGD(0.1)

        linear = Linear(in_size, out_size, initialize='zeros')

        wx = Wx(in_size, out_size, initialize='zeros')
        plusbias = PlusBias(out_size, initialize='zeros')
        wxbias = Seq(wx, plusbias)

        linear_y = linear.forward(x)
        wxbias_y = wxbias.forward(x)
        assert_array_equal(linear_y, wxbias_y)

        dJdy = np.random.rand(out_size)
        linear_grad = linear.backward(dJdy)
        wxbias_grad = wxbias.backward(dJdy)
        assert_array_equal(linear_grad, wxbias_grad)

        linear.update_weights(optimizer)
        wxbias.update_weights(optimizer)

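        # linear.W keeps the bias in its first column, so it should equal
        # plusbias.b stacked alongside the columns of wx.W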
        stack = np.vstack([plusbias.b.get(), wx.W.get().T]).T
        assert_array_equal(linear.W, stack)
Example 5
    def test_Expand(self):
        # two stacked Linear layers expanding 2 -> 3, then mapping back to 2
        model = Seq([
            Linear(2, 3, initialize='ones'),
            Linear(3, 2, initialize='ones'),
        ])
        x = np.random.rand(2)
        model.forward(x)
        back = model.backward(np.ones(2))
Example 6
    def test_LinearLayerNumericalGradientCheck(self):
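        # the analytical gradient from backward() should agree with a numerical
        # estimate of the gradient of model.forward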
        x = np.random.rand(3)

        model = Seq()
        model.add(Linear(3, 2, initialize='ones'))

        num_grad = numerical_gradient.calc(model.forward, x)
        deriv_grad = model.backward(np.array([1, 1]))
        num_grad = np.sum(num_grad, axis=0)

        numerical_gradient.assert_are_similar(deriv_grad, num_grad)
Example 7
class WxBiasLinear(Layer):
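    """A Linear layer assembled from a Wx product followed by a PlusBias layer."""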
    def __init__(self, in_size, out_size, initialize_W, initialize_b):
        self.Wx = Wx(in_size, out_size, initialize_W)
        self.bias = PlusBias(out_size, initialize_b)
        self.model = Seq(self.Wx, self.bias)

    def forward(self, x, is_training=False):
        return self.model.forward(x, is_training)

    def backward(self, dJdy):
        return self.model.backward(dJdy)

    def update_weights(self, optimizer):
        return self.model.update_weights(optimizer)
Example 8
    def test_TwoDifferentModelsShouldHaveDifferentGradients(self):
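        # two models that differ only in their activation (Tanh vs Relu) should
        # not produce matching gradients for the same input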
        x = np.random.rand(5)

        real_model = Seq([
            Linear(5, 3, initialize='ones'),
            Tanh(),
            Linear(3, 5, initialize='ones'),
            Tanh()
        ])
        y = real_model.forward(x)
        real_grad = real_model.backward(np.ones(5))

        num_model = Seq([
            Linear(5, 3, initialize='ones'),
            Relu(),
            Linear(3, 5, initialize='ones'),
            Relu()
        ])
        num_grad = numerical_gradient.calc(num_model.forward, x)
        num_grad = np.sum(num_grad, axis=1)
        self.assertFalse(numerical_gradient.are_similar(real_grad, num_grad))
Example 9
    def test_TwoLinearLayersTanh(self):
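        # the analytical gradient of a Linear-Tanh-Linear-Tanh stack should match
        # the numerical gradient of an identically initialized copy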
        x = np.random.rand(5)

        real_model = Seq([
            Linear(5, 3, initialize='ones'),
            Tanh(),
            Linear(3, 5, initialize='ones'),
            Tanh()
        ])
        y = real_model.forward(x)
        real_grad = real_model.backward(np.ones(5))

        num_model = Seq([
            Linear(5, 3, initialize='ones'),
            Tanh(),
            Linear(3, 5, initialize='ones'),
            Tanh()
        ])
        num_grad = numerical_gradient.calc(num_model.forward, x)

        num_grad = np.sum(num_grad, axis=1)
        self.assertTrue(numerical_gradient.are_similar(real_grad, num_grad))