Example #1
    def test_forward6(self):
        N, C, H, W = 20, 10, 5, 5
        cl = chainer.links.BatchNormalization(C)
        l = dezero.layers.BatchNorm()

        for i in range(10):
            x = np.random.randn(N, C, H, W).astype('f')
            cy = cl(x)
            y = l(x)
            self.assertTrue(array_allclose(y.data, cy.data))
        self.assertTrue(array_allclose(cl.avg_mean.data, l.avg_mean.data))
        self.assertTrue(array_allclose(cl.avg_var.data, l.avg_var.data))
Example #2
    def test_forward2(self):
        N, C = 8, 3
        cl = chainer.links.BatchNormalization(C)
        l = dezero.layers.BatchNorm()
        for i in range(10):
            x, gamma, beta, mean, var = get_params(N, C)
            cy = cl(x)
            y = l(x)
        self.assertTrue(array_allclose(cl.avg_mean, l.avg_mean.data))
        self.assertTrue(array_allclose(cl.avg_var, l.avg_var.data))

        with dezero.test_mode():
            y = l(x)
        with chainer.using_config('train', False):
            cy = cl(x)
        self.assertTrue(array_allclose(cy.data, y.data))
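Both batch-norm examples above depend on the layers accumulating running statistics during training and switching to them at inference time. Below is a minimal NumPy sketch of that behaviour, assuming the usual exponential-decay update (decay around 0.9); the function is illustrative only, not DeZero's or Chainer's implementation:

import numpy as np

def batch_norm_sketch(x, gamma, beta, avg_mean, avg_var,
                      train=True, decay=0.9, eps=2e-5):
    # Illustrative batch normalization for (N, C) input, normalizing over axis 0.
    if train:
        mean = x.mean(axis=0)
        var = x.var(axis=0)
        # update the running statistics in place, as the assertions above exercise
        # (real implementations also apply an unbiased-variance adjustment, omitted here)
        avg_mean *= decay; avg_mean += (1 - decay) * mean
        avg_var *= decay;  avg_var += (1 - decay) * var
    else:
        mean, var = avg_mean, avg_var  # test mode: use the stored averages
    return gamma * (x - mean) / np.sqrt(var + eps) + beta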
Example #3
 def test_forward1(self):
     x = np.array([[-1, 0, 1, 2], [2, 0, 1, -1]], np.float32)
     t = np.array([3, 0]).astype(np.int32)
     y = F.softmax_cross_entropy(x, t)
     y2 = CF.softmax_cross_entropy(x, t)
     res = array_allclose(y.data, y2.data)
     self.assertTrue(res)
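For reference, the loss compared in this test can be written directly in NumPy. The sketch below assumes the default mean-over-batch reduction, which the equality asserted above implies both implementations share:

import numpy as np

x = np.array([[-1, 0, 1, 2], [2, 0, 1, -1]], np.float32)
t = np.array([3, 0], np.int32)
# numerically stable log-softmax per row, then the mean negative
# log-likelihood of the target classes
log_z = x - x.max(axis=1, keepdims=True)
log_p = log_z - np.log(np.exp(log_z).sum(axis=1, keepdims=True))
loss = -log_p[np.arange(len(t)), t].mean()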
Example #4
 def test_forward2(self):
     slope = 0.002
     x = np.random.randn(100)
     y2 = CF.leaky_relu(x, slope)
     y = F.leaky_relu(x, slope)
     res = array_allclose(y.data, y2.data)
     self.assertTrue(res)
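The leaky ReLU under test is a simple elementwise branch; a one-line NumPy equivalent, using the same slope argument as the test:

import numpy as np

def leaky_relu_ref(x, slope):
    # pass positive values through unchanged, scale negative ones by `slope`
    return np.where(x > 0, x, slope * x)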
Example #5
 def test_forward4(self):
     shape = (10, 20, 30)
     axis = (0, 1)
     x = Variable(np.random.rand(*shape))
     y = F.max(x, axis=axis, keepdims=True)
     expected = np.max(x.data, axis=axis, keepdims=True)
     self.assertTrue(array_allclose(y.data, expected))
Example #6
 def test_forward3(self):
     np.random.seed(0)
     x = np.random.rand(10, 10, 10).astype('f')
     y2 = CF.softmax(x, axis=1)
     y = F.softmax(Variable(x))
     res = array_allclose(y.data, y2.data)
     self.assertTrue(res)
Example #7
 def test_forward2(self):
     shape = (10, 20, 30)
     axis = 1
     x = Variable(np.random.rand(*shape))
     y = F.max(x, axis=axis)
     expected = np.max(x.data, axis=axis)
     self.assertTrue(array_allclose(y.data, expected))
Example #8
 def test_forward4(self):
     N, C, H, W = 20, 10, 5, 5
     x, gamma, beta, mean, var = get_params(N, C, H, W)
     cy = CF.fixed_batch_normalization(x, gamma, beta, mean, var)
     with dezero.test_mode():
         y = F.batch_nrom(x, gamma, beta, mean, var)
     self.assertTrue(array_allclose(y.data, cy.data))
Example #9
 def test_forward1(self):
     x = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
     w = Variable(x.data.T)
     y = F.matmul(x, w)
     res = y.data
     expected = np.array([[14, 32], [32, 77]])
     self.assertTrue(array_allclose(res, expected))
Example #10
    def test_compare1(self):
        rate = 0.4
        x = np.random.rand(10, 2)
        t = np.zeros((10)).astype(int)
        layer = dezero.layers.Linear(2, 3, nobias=True)
        layer.W.data = np.ones_like(layer.W.data)

        optimizer = dezero.optimizers.SGD().setup(layer)
        optimizer.add_hook(dezero.optimizers.WeightDecay(rate=rate))

        layer.cleargrads()
        y = layer(x)
        y = F.softmax_cross_entropy(y, t)
        y.backward()
        optimizer.update()
        W0 = layer.W.data.copy()

        layer.W.data = np.ones_like(layer.W.data)
        optimizer.hooks.clear()
        layer.cleargrads()
        y = layer(x)
        y = F.softmax_cross_entropy(y, t) + rate / 2 * (layer.W**2).sum()
        y.backward()
        optimizer.update()
        W1 = layer.W.data
        self.assertTrue(array_allclose(W0, W1))
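test_compare1 checks a textbook identity: an SGD weight-decay hook with a given rate adds rate * W to each parameter's gradient, which is exactly the gradient of the explicit penalty rate / 2 * (W**2).sum() added to the loss in the second run, so both runs perform the same update. A small NumPy check of that gradient identity (the finite-difference code below is illustrative only):

import numpy as np

rate, eps = 0.4, 1e-6
W = np.random.randn(2, 3)

def penalty(W):
    return rate / 2 * (W ** 2).sum()

# numerical gradient of the explicit L2 penalty ...
num_grad = np.zeros_like(W)
for idx in np.ndindex(W.shape):
    d = np.zeros_like(W)
    d[idx] = eps
    num_grad[idx] = (penalty(W + d) - penalty(W - d)) / (2 * eps)

# ... matches rate * W, which is what the WeightDecay hook adds to each gradient
assert np.allclose(num_grad, rate * W, atol=1e-4)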
Example #11
    def test_forward1(self):
        x = np.random.randn(1, 3, 224, 224).astype('f')
        _model = chainer.links.VGG16Layers(None)
        _model.to_gpu()

        with chainer.using_config('train', False):
            with chainer.using_config('enable_backprop', False):
                out_layer_name = 'fc8'
                _y = _model.forward(x, [out_layer_name])[out_layer_name]

        model = VGG16()
        layers = _model.available_layers
        for l in layers:
            if "conv" in l or "fc" in l:
                m1 = getattr(model, l)
                m2 = getattr(_model, l)
                m1.W.data = m2.W.data
                m1.b.data = m2.b.data
                if "fc" in l:
                    m1.W.data = m1.W.data.T
        model.to_gpu()

        with dezero.test_mode():
            y = model(x)

        self.assertTrue(array_allclose(y.data, _y.data))
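The m1.W.data = m1.W.data.T step for the fully connected layers reflects a weight-layout difference these tests rely on: Chainer's Linear keeps W as (out_size, in_size) and computes x @ W.T, while the DeZero side here expects an (in_size, out_size) matrix and computes x @ W. A small NumPy check of that assumption:

import numpy as np

x = np.random.randn(2, 3).astype('f')
W_chainer = np.random.randn(4, 3).astype('f')  # (out_size, in_size) layout
W_dezero = W_chainer.T                         # (in_size, out_size) layout
# both conventions yield the same linear output
assert np.allclose(x @ W_chainer.T, x @ W_dezero)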
Example #12
    def test_forward2(self):
        x = np.array([[1, 2, 3], [4, 5, 6]]).astype('f')
        W = x.T
        b = None
        y = F.linear(x, W, b)

        cy = chainer.functions.linear(x, W.T)
        self.assertTrue(array_allclose(y.data, cy.data))
Example #13
    def test_forward1(self):
        n, c, h, w = 1, 5, 16, 16
        ksize, stride, pad = 2, 2, 0
        x = np.random.randn(n, c, h, w).astype('f')

        y = F.pooling_simple(x, ksize, stride, pad)
        expected = CF.max_pooling_2d(x, ksize, stride, pad)
        self.assertTrue(array_allclose(expected.data, y.data))
Example #14
    def test_forward2(self):
        n, c, h, w = 1, 5, 15, 15
        ksize, stride, pad = 2, 2, 0
        x = np.random.randn(n, c, h, w).astype('f')

        y = F.average_pooling(x, ksize, stride, pad)
        expected = CF.average_pooling_2d(x, ksize, stride, pad)
        self.assertTrue(array_allclose(expected.data, y.data))
Example #15
 def test_forward3(self):
     n, c, h, w = 1, 5, 20, 15
     o, k, s, p = 3, (5, 3), 1, 3
     x = np.random.randn(n, c, h, w).astype('f')
     W = np.random.randn(o, c, k[0], k[1]).astype('f')
     b = None
     y = F.conv2d(x, W, b, s, p)
     expected = CF.convolution_2d(x, W, b, s, p)
     self.assertTrue(array_allclose(expected.data, y.data))
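The convolution and pooling tests agree on shapes because both libraries use the standard output-size formula; a small, purely illustrative helper:

def conv_out_size(input_size, kernel_size, stride, pad):
    # standard convolution/pooling output size
    return (input_size + 2 * pad - kernel_size) // stride + 1

# test_forward3 above: h=20, w=15, kernel=(5, 3), stride=1, pad=3
assert conv_out_size(20, 5, 1, 3) == 22
assert conv_out_size(15, 3, 1, 3) == 19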
Example #16
 def test_forward1(self):
     n, c, h, w = 1, 5, 15, 15
     o, k, s, p = 8, (3, 3), (1, 1), (1, 1)
     x = np.random.randn(n, c, h, w).astype('f')
     W = np.random.randn(o, c, k[0], k[1]).astype('f')
     b = None
     y = F.conv2d_simple(x, W, b, s, p)
     expected = CF.convolution_2d(x, W, b, s, p)
     self.assertTrue(array_allclose(expected.data, y.data))
Example #17
    def test_forward3(self):
        layer = chainer.links.Linear(3, 2)
        x = np.array([[1, 2, 3], [4, 5, 6]]).astype('f')
        W = layer.W.data.T
        b = layer.b.data
        y = F.linear(x, W, b)

        cy = layer(x)
        self.assertTrue(array_allclose(y.data, cy.data))
Example #18
 def test_forward1(self):
     N, C = 8, 1
     x, gamma, beta, mean, var = get_params(N, C)
     cy = CF.batch_normalization(x,
                                 gamma,
                                 beta,
                                 running_mean=mean,
                                 running_var=var)
     y = F.batch_nrom(x, gamma, beta, mean, var)
     self.assertTrue(array_allclose(y.data, cy.data))
Example #19
    def test_forward1(self):
        N, C = 8, 3
        x, gamma, beta, mean, var = get_params(N, C)
        ly = chainer.links.BatchNormalization(3)
        l = dezero.layers.BatchNorm()
        ly.to_gpu()
        l.to_gpu()
        cy = ly(x)
        y = l(x)

        self.assertTrue(array_allclose(y.data, cy.data))
Example #20
    def test_backward1(self):
        x = np.random.randn(2, 3, 224, 224).astype('f')
        _model = chainer.links.VGG16Layers(None)

        with chainer.using_config('train', False):
            out_layer_name = 'fc8'
            _y = _model.forward(x, [out_layer_name])[out_layer_name]
            _y.grad = np.ones_like(_y.data)
            _y.backward()

        model = VGG16()
        layers = _model.available_layers
        for l in layers:
            if "conv" in l or "fc" in l:
                m1 = getattr(model, l)
                m2 = getattr(_model, l)
                m1.W.data = m2.W.data
                m1.b.data = m2.b.data
                if "fc" in l:
                    m1.W.data = m1.W.data.T

        with dezero.test_mode():
            y = model(x)
            y.backward()

        layers = _model.available_layers
        for l in layers:
            if "conv" in l:
                m1 = getattr(model, l)
                m2 = getattr(_model, l)
                self.assertTrue(array_allclose(m1.W.data, m2.W.data))
                self.assertTrue(array_allclose(m1.b.data, m2.b.data))
            elif "fc" in l:
                m1 = getattr(model, l)
                m2 = getattr(_model, l)
                self.assertTrue(array_allclose(m1.W.data, m2.W.data.T))
                self.assertTrue(array_allclose(m1.b.data, m2.b.data))
Example #21
 def test_forward2(self):
     n, c_i, c_o = 10, 1, 3
     h_i, w_i = 5, 10
     h_k, w_k = 10, 10
     h_p, w_p = 5, 5
     s_y, s_x = 5, 5
     x = np.random.uniform(0, 1, (n, c_i, h_i, w_i)).astype(np.float32)
     W = np.random.uniform(0, 1, (c_i, c_o, h_k, w_k)).astype(np.float32)
     b = None
     expected = CF.deconvolution_2d(x,
                                    W,
                                    b,
                                    stride=(s_y, s_x),
                                    pad=(h_p, w_p))
     y = F.deconv2d(x, W, b, stride=(s_y, s_x), pad=(h_p, w_p))
     self.assertTrue(array_allclose(expected.data, y.data))
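Transposed convolution inverts the usual convolution size relationship; with the parameters of this test the output spatial size follows the standard formula below (a sketch, not DeZero-specific code):

def deconv_out_size(input_size, kernel_size, stride, pad):
    # transposed-convolution output size, the inverse of the usual conv formula
    return stride * (input_size - 1) + kernel_size - 2 * pad

# test_forward2 above: h_i=5, w_i=10, kernel=10, stride=5, pad=5
assert deconv_out_size(5, 10, 5, 5) == 20
assert deconv_out_size(10, 10, 5, 5) == 45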
Example #22
 def test_forward3(self):
     x = Variable(np.random.rand(10, 20, 30))
     y = F.sum(x, axis=1, keepdims=True)
     expected = np.sum(x.data, axis=1, keepdims=True)
     self.assertTrue(array_allclose(y.data, expected))
Example #23
 def test_forward3(self):
     N, C = 20, 10
     x, gamma, beta, mean, var = get_params(N, C)
     cy = CF.batch_normalization(x, gamma, beta)
     y = F.batch_nrom(x, gamma, beta, mean, var)
     self.assertTrue(array_allclose(y.data, cy.data))
Example #24
 def test_forward1(self):
     x_data = np.arange(12).reshape((2, 2, 3))
     x = Variable(x_data)
     y = F.get_item(x, 0)
     self.assertTrue(array_allclose(y.data, x_data[0]))
Example #25
 def test_forward3(self):
     x_data = np.arange(12).reshape((2, 2, 3))
     x = Variable(x_data)
     y = F.get_item(x, (Ellipsis, 2))
     self.assertTrue(array_allclose(y.data, x_data[..., 2]))
Example #26
 def test_forward2(self):
     x_data = np.arange(12).reshape((2, 2, 3))
     x = Variable(x_data)
     y = F.get_item(x, (0, 0, slice(0, 2, 1)))
     self.assertTrue(array_allclose(y.data, x_data[0, 0, 0:2:1]))
Example #27
 def test_forward3(self):
     x = Variable(np.random.rand(10))
     y = F.sum_to(x, (10, ))
     expected = x.data  # same shape, so nothing changes
     self.assertTrue(array_allclose(y.data, expected))
Example #28
 def test_forward2(self):
     x = Variable(np.array([[1., 2., 3.], [4., 5., 6.]]))
     y = F.sum_to(x, (1, 3))
     expected = np.sum(x.data, axis=0, keepdims=True)
     self.assertTrue(array_allclose(y.data, expected))
Example #29
 def test_forward1(self):
     x0 = np.array([0.0, 1.0, 2.0])
     x1 = np.array([0.0, 1.0, 2.0])
     expected = ((x0 - x1)**2).sum() / x0.size
     y = F.mean_squared_error_simple(x0, x1)
     self.assertTrue(array_allclose(y.data, expected))
Example #30
 def test_forward1(self):
     x = Variable(np.random.rand(10))
     y = F.sum_to(x, (1, ))
     expected = np.sum(x.data)
     self.assertTrue(array_allclose(y.data, expected))
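Taken together, the sum_to tests (Examples #27, #28 and #30) show that sum_to sums over whichever axes must shrink so the result has the requested shape, i.e. it is the reduction counterpart of NumPy broadcasting. A NumPy sketch of that behaviour (illustrative, not DeZero's implementation):

import numpy as np

def sum_to_ref(x, shape):
    # sum x along axes so the result has `shape` (the reverse of broadcasting)
    ndim_diff = x.ndim - len(shape)
    lead_axes = tuple(range(ndim_diff))  # extra leading axes to reduce away
    axes = tuple(i + ndim_diff for i, s in enumerate(shape) if s == 1)
    y = x.sum(lead_axes + axes, keepdims=True)
    if ndim_diff > 0:
        y = y.squeeze(lead_axes)  # drop the reduced leading axes
    return y

x = np.array([[1., 2., 3.], [4., 5., 6.]])
assert np.allclose(sum_to_ref(x, (1, 3)), x.sum(axis=0, keepdims=True))
assert np.allclose(sum_to_ref(x, (2, 3)), x)  # same shape: unchanged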