def check_value_check(self, x_data, t_data, use_cudnn):
    """Verify that ``cross_entropy`` validates its inputs.

    When ``self.valid`` is true the call must complete without raising;
    otherwise it must raise ``ValueError``.
    """
    probs = functions.softmax(chainer.Variable(x_data))
    labels = chainer.Variable(t_data)
    if not self.valid:
        with self.assertRaises(ValueError):
            cross_entropy(probs, labels, use_cudnn)
        return
    # Valid input: any exception raised here fails the test.
    cross_entropy(probs, labels, use_cudnn)
def check_forward(self, x_data, t_data, use_cudnn=True):
    """Compare ``cross_entropy``'s forward output against a naive reference.

    Runs the function on softmax probabilities, checks the scalar loss's
    shape, dtype, and score caching, then recomputes the mean negative
    log-likelihood in plain NumPy (skipping labels equal to -1) and
    asserts both values agree to five decimal places.
    """
    probs = functions.softmax(chainer.Variable(x_data))
    labels = chainer.Variable(t_data)
    loss = cross_entropy(
        probs, labels, use_cudnn=use_cudnn, cache_score=self.cache_score)

    # The loss must be a float32 scalar; the intermediate scores are kept
    # on the creator node only when cache_score was requested.
    self.assertEqual(loss.data.shape, ())
    self.assertEqual(loss.data.dtype, numpy.float32)
    self.assertEqual(hasattr(loss.creator, 'y'), self.cache_score)
    actual = float(cuda.to_cpu(loss.data))

    # Reference computation: move the class axis last and flatten so each
    # row holds the class scores for one target label.
    rows = numpy.rollaxis(self.x, 1, self.x.ndim).reshape(
        (self.t.size, self.x.shape[1]))
    flat_labels = self.t.ravel()
    total = 0.0
    counted = 0
    for row, label in six.moves.zip(rows, flat_labels):
        if label == -1:
            # Ignored label: excluded from both the sum and the mean.
            continue
        log_z = numpy.logaddexp.reduce(row)
        total -= (row - log_z)[label]
        counted += 1
    expected = total / counted if counted else 0.0
    self.assertAlmostEqual(expected, actual, places=5)