Example 1
    def test_CheckMinibatchTrainerEqualsSimpleTrainer(self):
        train_set = [(np.random.rand(2), i) for i in range(3)]
        loss = SquaredLoss()
        epochs = 1
        optimizer = SGD(learning_rate=0.01)

        minibatch_model = Seq([Linear(2, 5, initialize='ones')])
        minibatch_trainer = MinibatchTrainer()
        minibatch_trainer.train_minibatches(minibatch_model,
                                            train_set,
                                            batch_size=1,
                                            loss=loss,
                                            epochs=epochs,
                                            optimizer=optimizer,
                                            shuffle=False)

        simple_model = Seq([Linear(2, 5, initialize='ones')])
        simple_trainer = OnlineTrainer()
        simple_trainer.train(simple_model, train_set, loss, epochs, optimizer)

        x = np.random.rand(2)

        simple_y = simple_model.forward(x)
        minibatch_y = minibatch_model.forward(x)

        assert_array_equal(simple_y, minibatch_y)
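
Why the two trainers should agree: with plain SGD, a minibatch of size 1 contributes exactly one gradient, so averaging (or summing) over the batch reduces to the online per-sample update. A minimal plain-numpy sketch of that equivalence, using a bare linear map and a fixed target instead of the library's SquaredLoss/trainer classes:

import numpy as np

np.random.seed(0)
samples = [np.random.rand(2) for _ in range(3)]
lr, W0 = 0.01, np.ones((5, 2))

def online(W):
    # one SGD step per sample
    for x in samples:
        dJdy = W @ x - 1.0                  # squared-loss gradient, target 1
        W = W - lr * np.outer(dJdy, x)
    return W

def minibatch(W, batch_size=1):
    # one SGD step per batch, averaging the per-sample gradients
    for i in range(0, len(samples), batch_size):
        batch = samples[i:i + batch_size]
        grad = sum(np.outer(W @ x - 1.0, x) for x in batch) / len(batch)
        W = W - lr * grad
    return W

# With batch_size=1 the averaged gradient is just the sample's own
# gradient, so both schedules produce identical weights.
assert np.allclose(online(W0), minibatch(W0, batch_size=1))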
Example 2
    def test_Reduce(self):
        model = Seq(
            [Linear(3, 2, initialize='ones'),
             Linear(2, 2, initialize='ones')])
        x = np.random.rand(3)
        model.forward(x)
        model.backward(np.array([1., 1.]))
Example 3
    def test_Expand(self):
        model = Seq([
            Linear(2, 3, initialize='ones'),
            Linear(3, 2, initialize='ones'),
        ])
        x = np.random.rand(2)
        model.forward(x)
        back = model.backward(np.ones(2))
Example 4
    def test_update_weights_layer_vs_syntax(self):
        x = np.array([1., 2., 3.])
        optimizer = SGD(0.1)

        W = np.random.rand(3, 3 + 1)

        linear_layer = layers.Linear(3, 3, initialize=W.copy())
        linear_layer_model = Seq(linear_layer, layers.Tanh)
        y = linear_layer_model.forward(x)
        back = linear_layer_model.backward(np.ones(3))

        var_x = Var('x')
        syntax_linear = Linear(3, 3, initialize=W.copy(), input=var_x)
        syntax_model = Tanh(syntax_linear)
        syntax_y = syntax_model.forward_variables({'x': x})
        syntax_back = syntax_model.backward_variables(np.ones(3))

        assert_array_equal(linear_layer.delta_W, syntax_linear.layer.delta_W)

        # update weights in both models
        linear_layer_model.update_weights(optimizer)
        syntax_model.update_weights(optimizer)

        assert_array_equal(y, syntax_y)
        assert_array_equal(back, syntax_back['x'])
        assert_array_equal(linear_layer.W, syntax_linear.layer.W)
Example 5
    def test_LinearSigmoid(self):
        model = Seq()
        model.add(Linear(2, 1, initialize='ones'))
        model.add(Sigmoid())
        data = np.array([2., 3.])
        out = model.forward(data)
        self.assertEqual(round(out, 2), 1.)
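
The expected output can be verified by hand: with weights and bias initialized to ones (Example 15 suggests 'ones' covers the bias too), the linear stage yields 1·2 + 1·3 + 1 = 6, and sigmoid(6) ≈ 0.9975, which rounds to 1.0 at two decimals. A standalone check in plain numpy:

import numpy as np

z = 1. * 2. + 1. * 3. + 1.    # ones-initialized weights and bias on [2, 3]
out = 1. / (1. + np.exp(-z))  # sigmoid(6) ~= 0.99753
assert round(out, 2) == 1.0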
Example 6
    def test_LinearSoftmax(self):
        model = Seq()
        model.add(Linear(2, 1))
        model.add(Softmax())
        data = np.array([2., 3.])
        out = model.forward(data)
        self.assertEqual(out, 1.)
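
Note that this assertion holds regardless of the (random) weights: a softmax over a single output is always exactly 1, because the lone exponential is divided by itself. A standalone check:

import numpy as np

def softmax(z):
    e = np.exp(z - z.max())  # shift for numerical stability
    return e / e.sum()

# Any one-element input maps to exactly 1.
assert np.allclose(softmax(np.array([42.])), 1.0)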
Example 7
    def test_ManyErrors(self):
        model = Seq(
            [Linear(2, 3, initialize='ones'),
             Linear(3, 1, initialize='ones')])
        x = np.random.rand(2)
        y = model.forward(x)
        model.backward(np.array([1.]))
Example 8
    def test_compare_with_Linear(self):
        in_size = 2
        out_size = 3
        x = np.random.rand(in_size)
        optimizer = SGD(0.1)

        linear = Linear(in_size, out_size, initialize='zeros')

        wx = Wx(in_size, out_size, initialize='zeros')
        plusbias = PlusBias(out_size, initialize='zeros')
        wxbias = Seq(wx, plusbias)

        linear_y = linear.forward(x)
        wxbias_y = wxbias.forward(x)
        assert_array_equal(linear_y, wxbias_y)

        dJdy = np.random.rand(out_size)
        linear_grad = linear.backward(dJdy)
        wxbias_grad = wxbias.backward(dJdy)
        assert_array_equal(linear_grad, wxbias_grad)

        linear.update_weights(optimizer)
        wxbias.update_weights(optimizer)

        stack = np.vstack([plusbias.b.get(), wx.W.get().T]).T
        assert_array_equal(linear.W, stack)
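
The final assertion pins down the assumed weight layout: Linear seems to keep the bias as the first column of its (out × in+1) matrix W, so stacking b in front of Wx's columns reconstructs it. The vstack/transpose step is easy to misread; here is the same manipulation on small stand-in arrays:

import numpy as np

out_size, in_size = 3, 2
b = np.arange(out_size, dtype=float)              # stand-in bias, shape (3,)
W = np.arange(out_size * in_size, dtype=float).reshape(out_size, in_size)

stack = np.vstack([b, W.T]).T                     # shape (3, 1 + 2)
assert np.array_equal(stack[:, 0], b)             # first column: bias
assert np.array_equal(stack[:, 1:], W)            # remaining columns: W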
Example 9
class WxBiasLinear(Layer):
    def __init__(self, in_size, out_size, initialize_W, initialize_b):
        self.Wx = Wx(in_size, out_size, initialize_W)
        self.bias = PlusBias(out_size, initialize_b)
        self.model = Seq(self.Wx, self.bias)

    def forward(self, x, is_training=False):
        return self.model.forward(x, is_training)

    def backward(self, dJdy):
        return self.model.backward(dJdy)

    def update_weights(self, optimizer):
        return self.model.update_weights(optimizer)
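
For reference, a plain-numpy sketch of what the two delegated stages compute on forward and backward (this mirrors the composition above, not the library's internals; the delta names follow the delta_W attribute seen in Example 4):

import numpy as np

in_size, out_size = 2, 3
W = np.ones((out_size, in_size))   # Wx stage parameters
b = np.zeros(out_size)             # PlusBias stage parameters

x = np.random.rand(in_size)
y = W @ x + b                      # forward: Wx, then PlusBias

dJdy = np.random.rand(out_size)
dJdx = W.T @ dJdy                  # backward through Wx
delta_W = np.outer(dJdy, x)        # weight gradient held by Wx
delta_b = dJdy                     # bias gradient held by PlusBias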
Example 10
    def test_TwoDifferentModelsShouldHaveDifferentGradients(self):
        x = np.random.rand(5)

        real_model = Seq([
            Linear(5, 3, initialize='ones'),
            Tanh(),
            Linear(3, 5, initialize='ones'),
            Tanh()
        ])
        y = real_model.forward(x)
        real_grad = real_model.backward(np.ones(5))

        num_model = Seq([
            Linear(5, 3, initialize='ones'),
            Relu(),
            Linear(3, 5, initialize='ones'),
            Relu()
        ])
        num_grad = numerical_gradient.calc(num_model.forward, x)
        num_grad = np.sum(num_grad, axis=1)
        self.assertFalse(numerical_gradient.are_similar(real_grad, num_grad))
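
numerical_gradient.calc is presumably a finite-difference Jacobian estimate; summing it over axis 1 gives the gradient of the summed outputs, which is what backward(np.ones(5)) returns analytically. A minimal central-difference version of that idea (the helper name numerical_jacobian is my own, not the library's):

import numpy as np

def numerical_jacobian(f, x, eps=1e-6):
    # J[i, j] = d f(x)[j] / d x[i], estimated by central differences
    fx = f(x)
    J = np.zeros((x.size, fx.size))
    for i in range(x.size):
        xp, xm = x.copy(), x.copy()
        xp[i] += eps
        xm[i] -= eps
        J[i] = (f(xp) - f(xm)) / (2 * eps)
    return J

# Sanity check: for elementwise tanh the Jacobian is diagonal
# with entries 1 - tanh(x)^2.
x = np.random.rand(5)
J = numerical_jacobian(np.tanh, x)
assert np.allclose(np.diag(J), 1 - np.tanh(x) ** 2, atol=1e-5)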
Example 11
    def test_TwoLinearLayersTanh(self):
        x = np.random.rand(5)

        real_model = Seq([
            Linear(5, 3, initialize='ones'),
            Tanh(),
            Linear(3, 5, initialize='ones'),
            Tanh()
        ])
        y = real_model.forward(x)
        real_grad = real_model.backward(np.ones(5))

        num_model = Seq([
            Linear(5, 3, initialize='ones'),
            Tanh(),
            Linear(3, 5, initialize='ones'),
            Tanh()
        ])
        num_grad = numerical_gradient.calc(num_model.forward, x)

        num_grad = np.sum(num_grad, axis=1)
        self.assertTrue(numerical_gradient.are_similar(real_grad, num_grad))
Example 12
    def test_MulLayer(self):
        # 2 * 3 * 4
        model = Seq(Par(Const(2.), Const(3.), Const(4.)), Mul)
        y = model.forward(np.array([1.]))
        assert_array_equal(y, 24.)
Example 13
    def test_init_and_forward_SumLayer(self):
        # 1 + 2 + 3
        model = Seq(Par(Const(1.), Const(2.), Const(3.)), Sum)
        y = model.forward(np.zeros(3))
        assert_array_equal(y, 6.)
Example 14
    def test_short_syntax(self):
        model = Seq(Linear(2, 1, initialize='ones'), Sigmoid)
        data = np.array([2., 3.])
        out = model.forward(data)
        self.assertEqual(round(out, 2), 1.)
Example 15
    def test_Linear(self):
        model = Seq()
        model.add(Linear(2, 1, initialize='ones'))
        data = np.array([2., 2.])
        y = model.forward(data)
        self.assertEqual(y, np.array([5]))
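
The expected 5 again assumes the 'ones' initialization includes the bias: y = 1·2 + 1·2 + 1. With the bias stored as the first column of W (the layout Example 8 suggests), the same value falls out of a bare matrix product:

import numpy as np

W = np.ones((1, 2 + 1))           # ones weights, bias in the extra column
x = np.array([2., 2.])
y = W @ np.append(1., x)          # prepend the constant bias input
assert np.array_equal(y, np.array([5.]))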