def forward(self, x):
    """Apply this layer's 2D convolution to `x`.

    On the first call the weight tensor is still empty, so the number of
    input channels is inferred from `x` and W is initialized lazily.
    """
    if self.W.data is None:
        # Lazy initialization: channel count comes from the first input.
        self.in_channels = x.shape[1]
        self._init_W()
    return F.conv2d(x, self.W, self.b, self.stride, self.pad)
def __call__(self, x):
    """Apply the convolution, lazily building W on the same device as `x`.

    The array module (numpy or cupy) is taken from the input so the weights
    are allocated on CPU or GPU to match it.
    """
    if self.W.data is None:
        self.in_channels = x.shape[1]
        # Initialize W with the input's array module (numpy vs cupy).
        self._init_W(cuda.get_array_module(x))
    return F.conv2d(x, self.W, self.b, self.stride, self.pad)
def test_backward3(self):
    """Numerical gradient check of conv2d w.r.t. the filter W."""
    n, c, h, w = 1, 5, 20, 15
    o, k, s, p = 3, (5, 3), 1, 3
    x = np.random.randn(n, c, h, w)
    b = np.random.randn(o)
    W = np.random.randn(o, c, k[0], k[1])
    f = lambda W: F.conv2d(x, W, b, s, p)
    self.assertTrue(gradient_check(f, W))
def test_backward2(self):
    """Numerical gradient check of conv2d w.r.t. the bias b.

    Fix: call `gradient_check` — the helper used by the sibling tests
    (e.g. test_backward3); `check_backward` is not the convention used
    elsewhere in these tests and looks like a typo.
    """
    n, c, h, w = 1, 5, 20, 15
    o, k, s, p = 3, (5, 3), 1, 3
    x = np.random.randn(n, c, h, w)
    W = np.random.randn(o, c, k[0], k[1])
    b = np.random.randn(o)
    f = lambda b: F.conv2d(x, W, b, s, p)
    self.assertTrue(gradient_check(f, b))
def test_forward3(self):
    """Forward result (no bias, asymmetric kernel) matches Chainer's."""
    n, c, h, w = 1, 5, 20, 15
    o, k, s, p = 3, (5, 3), 1, 3
    x = np.random.randn(n, c, h, w).astype('f')
    W = np.random.randn(o, c, k[0], k[1]).astype('f')
    b = None
    expected = CF.convolution_2d(x, W, b, s, p)
    y = F.conv2d(x, W, b, s, p)
    self.assertTrue(array_allclose(expected.data, y.data))
def test_forward2(self):
    """Forward result (tuple stride/pad, no bias) matches Chainer's.

    Fix: compare float32 outputs with `array_allclose` instead of
    `np.array_equal` — two independent conv implementations are not
    guaranteed bit-identical, and the sibling forward tests (e.g.
    test_forward3) already use the tolerance-based comparison.
    """
    n, c, h, w = 1, 5, 15, 15
    o, k, s, p = 8, (3, 3), (3, 1), (2, 1)
    x = np.random.randn(n, c, h, w).astype('f')
    W = np.random.randn(o, c, k[0], k[1]).astype('f')
    b = None
    y = F.conv2d(x, W, b, s, p)
    expected = CF.convolution_2d(x, W, b, s, p)
    self.assertTrue(array_allclose(expected.data, y.data))
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import numpy as np
from dezero import Variable
import dezero.functions as F

# im2col with scalar kernel/stride/pad arguments.
col1 = F.im2col(np.random.rand(1, 3, 7, 7), kernel_size=5, stride=1, pad=0,
                to_matrix=True)
print(col1.shape)

# im2col with tuple arguments on a batched input.
kernel_size = (5, 5)
stride = (1, 1)
pad = (0, 0)
col2 = F.im2col(np.random.rand(10, 3, 7, 7), kernel_size, stride, pad,
                to_matrix=True)
print(col2.shape)
print()

# Forward + backward through conv2d (no bias).
N, C, H, W = 1, 5, 15, 15
OC, (KH, KW) = 8, (3, 3)
x = Variable(np.random.randn(N, C, H, W))
W = np.random.randn(OC, C, KH, KW)
# y = F.conv2d_simple(x, W, b=None, stride=1, pad=1)  # this one does not work
y = F.conv2d(x, W, b=None, stride=1, pad=1)
y.backward()
print(y.shape)
print(x.grad.data)
def __call__(self, x):
    """Convolve `x`, initializing the weights from `x` on first use."""
    if self.W.data is None:
        # Weight shape/device are derived from the first input seen.
        self._init_W(x)
    return F.conv2d(x, self.W, self.b, self.stride, self.pad)
def __call__(self, x):
    """Apply a 2D convolution with this layer's weight, bias, stride, and pad."""
    return F.conv2d(x, self.W, self.b, self.stride, self.pad)