def check_forward(self, h_data):
    """Compare ``functions.decov`` against a naive per-pair covariance loss."""
    var = chainer.Variable(h_data)
    loss = functions.decov(var)
    self.assertEqual(loss.data.shape, ())
    self.assertEqual(loss.data.dtype, numpy.float32)
    actual = float(loss.data)

    # Reference value computed on CPU, one covariance entry at a time.
    h_cpu = cuda.to_cpu(h_data)
    mean = h_cpu.mean(axis=0)
    n_samples = h_cpu.shape[0]
    n_units = h_cpu.shape[1]
    expect = 0
    for i in six.moves.range(n_units):
        for j in six.moves.range(n_units):
            if i == j:
                # Diagonal entries contribute nothing to the decov loss.
                continue
            cov_ij = sum(
                (h_cpu[n, i] - mean[i]) * (h_cpu[n, j] - mean[j])
                for n in six.moves.range(n_samples)) / n_samples
            expect += cov_ij ** 2
    expect *= 0.5
    self.assertAlmostEqual(expect, actual, places=5)
def check_forward(self, h_data):
    """``functions.decov`` must agree with a hand-computed decov loss."""
    loss = functions.decov(chainer.Variable(h_data))
    self.assertEqual(loss.data.shape, ())
    self.assertEqual(loss.data.dtype, numpy.float32)
    loss_value = float(loss.data)

    # Naive reference: half the squared sum of off-diagonal covariances.
    data = cuda.to_cpu(h_data)
    mean = data.mean(axis=0)
    num = data.shape[0]
    dim = data.shape[1]
    expect = 0
    for row in six.moves.range(dim):
        for col in six.moves.range(dim):
            cov = 0.
            if row != col:
                for k in six.moves.range(num):
                    cov += (data[k, row] - mean[row]) * (
                        data[k, col] - mean[col])
                cov /= num
            expect += cov ** 2
    expect *= 0.5
    self.assertAlmostEqual(expect, loss_value, places=5)
def check_forward(self, h_data):
    """Forward output must match the ``_deconv`` reference under ``self.reduce``."""
    loss = functions.decov(chainer.Variable(h_data), self.reduce)
    self.assertEqual(loss.shape, self.gloss.shape)
    self.assertEqual(loss.data.dtype, self.dtype)
    actual = cuda.to_cpu(loss.data)

    # Expected value from the naive reference implementation on CPU.
    expect = _deconv(cuda.to_cpu(h_data))
    if self.reduce == 'half_squared_sum':
        expect = 0.5 * (expect ** 2).sum()
    numpy.testing.assert_allclose(
        expect, actual, **self.forward_options)
def check_forward(self, h_data):
    """Check ``functions.decov`` against ``_deconv`` for the configured reduce mode."""
    h = chainer.Variable(h_data)
    loss = functions.decov(h, self.reduce)
    self.assertEqual(loss.shape, self.gloss.shape)
    self.assertEqual(loss.data.dtype, self.dtype)
    got = cuda.to_cpu(loss.data)

    host_data = cuda.to_cpu(h_data)
    reference = _deconv(host_data)
    if self.reduce == 'half_squared_sum':
        # Collapse the covariance matrix to half of its squared sum.
        reference = (reference ** 2).sum() / 2.0
    numpy.testing.assert_allclose(reference, got, **self.forward_options)
def check_type(self, h_data, gloss_data):
    """Backward must produce a gradient whose dtype matches the input's."""
    variable = chainer.Variable(h_data)
    loss = functions.decov(variable, self.reduce)
    loss.grad = gloss_data
    loss.backward()
    self.assertEqual(h_data.dtype, variable.grad.dtype)
def f(h):
    """Gradient-check target: decov with the configured reduce mode."""
    loss = functions.decov(h, self.reduce)
    return loss
def check_invalid_option(self, xp):
    """``decov`` must reject an unknown ``reduce`` string with ValueError."""
    data = xp.asarray(self.h)
    with self.assertRaises(ValueError):
        functions.decov(data, 'invalid_option')
def check_type(self, h_data):
    """Backward must yield a gradient with the same dtype as the input."""
    var = chainer.Variable(h_data)
    # Scalar loss: backward() seeds the gradient implicitly.
    functions.decov(var).backward()
    self.assertEqual(h_data.dtype, var.grad.dtype)
def forward(self, inputs, device):
    """FunctionTestCase hook: apply decov with the configured reduce mode."""
    (h,) = inputs
    return functions.decov(h, self.reduce),