Example 1
    def __init__(self, **links):
        super(VGG16, self).__init__(
            conv1_1=L.Convolution2D(None, 64, 3, pad=1),
            conv1_2=L.Convolution2D(None, 64, 3, pad=1),

            conv2_1=L.Convolution2D(None, 128, 3, pad=1),
            conv2_2=L.Convolution2D(None, 128, 3, pad=1),

            conv3_1=L.Convolution2D(None, 256, 3, pad=1),
            conv3_2=L.Convolution2D(None, 256, 3, pad=1),
            conv3_3=L.Convolution2D(None, 256, 3, pad=1),

            conv4_1=L.Convolution2D(None, 512, 3, pad=1),
            conv4_2=L.Convolution2D(None, 512, 3, pad=1),
            conv4_3=L.Convolution2D(None, 512, 3, pad=1),
            norm4=Normalize(512, initial=initializers.Constant(20)),

            conv5_1=L.DilatedConvolution2D(None, 512, 3, pad=1),
            conv5_2=L.DilatedConvolution2D(None, 512, 3, pad=1),
            conv5_3=L.DilatedConvolution2D(None, 512, 3, pad=1),

            conv6=L.DilatedConvolution2D(None, 1024, 3, pad=6, dilate=6),
            conv7=L.Convolution2D(None, 1024, 1),
        )
        for name, link in six.iteritems(links):
            self.add_link(name, link)
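The `**links` loop at the end registers any extra keyword arguments on the chain via `add_link`, so callers can bolt additional layers onto this old-style VGG16 at construction time. A hypothetical usage sketch (the name `conv8_1` and its layer are made up for illustration):

import chainer.links as L

# Any extra keyword argument becomes a child link of the chain, so it is
# picked up by serialization and to_gpu just like the built-in layers.
model = VGG16(conv8_1=L.Convolution2D(None, 256, 1))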
Example 2
class TestNormalize(unittest.TestCase):

    def setUp(self):
        # self.n_channel, self.shape and self.eps are expected to be provided
        # elsewhere (e.g. by a parameterization decorator not shown in this
        # excerpt).
        self.link = Normalize(
            self.n_channel, initializers.Normal(), eps=self.eps)
        self.x = np.random.uniform(size=(1, self.n_channel) + self.shape) \
                          .astype(np.float32)

    def _check_forward(self, x):
        y = self.link(x)

        self.assertIsInstance(y, chainer.Variable)
        self.assertIsInstance(y.array, type(x))
        self.assertEqual(y.shape, x.shape)
        self.assertEqual(y.dtype, x.dtype)

        x = chainer.backends.cuda.to_cpu(x)
        y = chainer.backends.cuda.to_cpu(y.array)
        scale = chainer.backends.cuda.to_cpu(self.link.scale.array)

        norm = np.linalg.norm(x, axis=1, keepdims=True) + self.eps
        expect = x / norm * scale[:, np.newaxis, np.newaxis]
        np.testing.assert_almost_equal(y, expect)

    def test_forward_cpu(self):
        self._check_forward(self.x)

    @attr.gpu
    def test_forward_gpu(self):
        self.link.to_gpu()
        self._check_forward(chainer.backends.cuda.to_gpu(self.x))

    def test_forward_zero_cpu(self):
        self._check_forward(np.zeros_like(self.x))

    @attr.gpu
    def test_forward_zero_gpu(self):
        self.link.to_gpu()
        self._check_forward(
            chainer.backends.cuda.to_gpu(np.zeros_like(self.x)))
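The assertions above pin down what `Normalize` has to compute: every spatial position is divided by its L2 norm across channels (plus `eps`) and then rescaled by a learned per-channel factor. A minimal sketch of a link that satisfies this test, assuming the `Normalize(n_channel, initial, eps)` signature used in `setUp`; the implementation the examples actually come from may differ in details:

import chainer
import chainer.functions as F


class Normalize(chainer.Link):

    def __init__(self, n_channel, initial=0, eps=1e-5):
        super(Normalize, self).__init__()
        self.eps = eps
        with self.init_scope():
            # one learned scale factor per channel, filled in by `initial`
            self.scale = chainer.Parameter(initial, (n_channel,))

    def __call__(self, x):
        # L2 norm over the channel axis; mirrors
        # np.linalg.norm(x, axis=1, keepdims=True) + eps in the test above
        norm = F.sqrt(F.sum(F.square(x), axis=1, keepdims=True)) + self.eps
        y = x / F.broadcast_to(norm, x.shape)
        shape = (1, -1) + (1,) * (len(x.shape) - 2)
        return y * F.broadcast_to(F.reshape(self.scale, shape), x.shape)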
Example 3
class TestNormalize(unittest.TestCase):

    def setUp(self):
        self.link = Normalize(
            self.n_channel, initializers.Normal(), eps=self.eps)
        self.x = np.random.uniform(size=(1, self.n_channel) + self.shape) \
                          .astype(np.float32)

    def _check_forward(self, x):
        y = self.link(x)

        self.assertIsInstance(y, chainer.Variable)
        self.assertIsInstance(y.array, type(x))
        self.assertEqual(y.shape, x.shape)
        self.assertEqual(y.dtype, x.dtype)

        # chainer.cuda is the older spelling of chainer.backends.cuda used in
        # Example 2; both expose the same to_cpu/to_gpu helpers.
        x = chainer.cuda.to_cpu(x)
        y = chainer.cuda.to_cpu(y.array)
        scale = chainer.cuda.to_cpu(self.link.scale.array)

        norm = np.linalg.norm(x, axis=1, keepdims=True) + self.eps
        expect = x / norm * scale[:, np.newaxis, np.newaxis]
        np.testing.assert_almost_equal(y, expect)

    def test_forward_cpu(self):
        self._check_forward(self.x)

    @attr.gpu
    def test_forward_gpu(self):
        self.link.to_gpu()
        self._check_forward(chainer.cuda.to_gpu(self.x))

    def test_forward_zero_cpu(self):
        self._check_forward(np.zeros_like(self.x))

    @attr.gpu
    def test_forward_zero_gpu(self):
        self.link.to_gpu()
        self._check_forward(chainer.cuda.to_gpu(np.zeros_like(self.x)))
Example 4
    def __init__(self):
        super(VGG16, self).__init__()
        with self.init_scope():
            self.conv1_1 = L.Convolution2D(64, 3, pad=1)
            self.conv1_2 = L.Convolution2D(64, 3, pad=1)

            self.conv2_1 = L.Convolution2D(128, 3, pad=1)
            self.conv2_2 = L.Convolution2D(128, 3, pad=1)

            self.conv3_1 = L.Convolution2D(256, 3, pad=1)
            self.conv3_2 = L.Convolution2D(256, 3, pad=1)
            self.conv3_3 = L.Convolution2D(256, 3, pad=1)

            self.conv4_1 = L.Convolution2D(512, 3, pad=1)
            self.conv4_2 = L.Convolution2D(512, 3, pad=1)
            self.conv4_3 = L.Convolution2D(512, 3, pad=1)
            self.norm4 = Normalize(512, initial=initializers.Constant(20))

            self.conv5_1 = L.DilatedConvolution2D(512, 3, pad=1)
            self.conv5_2 = L.DilatedConvolution2D(512, 3, pad=1)
            self.conv5_3 = L.DilatedConvolution2D(512, 3, pad=1)

            self.conv6 = L.DilatedConvolution2D(1024, 3, pad=6, dilate=6)
            self.conv7 = L.Convolution2D(1024, 1)
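Compared with Example 1, this variant registers the layers inside `init_scope()` and leaves `in_channels` out entirely, so each convolution infers its input depth the first time it is called. A small sketch of that lazy initialization (the dummy input is chosen arbitrarily):

import numpy as np
import chainer.links as L

conv = L.Convolution2D(64, 3, pad=1)    # out_channels=64, ksize=3, in_channels inferred
x = np.zeros((1, 3, 32, 32), dtype=np.float32)
y = conv(x)                             # W is allocated here with shape (64, 3, 3, 3)
print(y.shape)                          # (1, 64, 32, 32)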
Example 5
    def _setup_normalize(self, layer):
        blobs = layer.blobs
        func = Normalize(caffe._get_num(blobs[0]))
        # copy the learned per-channel scale from the caffe blob
        func.scale.array[:] = np.array(blobs[0].data)
        with self.init_scope():
            setattr(self, layer.name, func)
Example 6
    def setUp(self):
        self.link = Normalize(
            self.n_channel, initializers.Normal(), eps=self.eps)
        self.x = np.random.uniform(size=(1, self.n_channel) + self.shape) \
            .astype(np.float32)
Example 7
    def _setup_normarize(self, layer):
        blobs = layer.blobs
        func = Normalize(caffe._get_num(blobs[0]))
        # .data is the older alias of .array; the scale is copied from the
        # caffe blob just as in Example 5
        func.scale.data[:] = np.array(blobs[0].data)
        # pre-init_scope registration style
        self.add_link(layer.name, func)