def test_multiclass_crossentropy():
    """Check op.multiclass_cross_entropy against a NumPy reference, on CPU and GPU.

    Builds two random row-stochastic matrices and compares the op's output
    to mean(-sum(X * log(O))) over samples.
    """
    # NOTE(review): a byte-identical function of the same name is defined
    # again immediately below; test collectors will only run the later one.
    X = np.random.rand(100, 10).astype(np.float32)
    O = np.random.rand(100, 10).astype(np.float32)
    # Normalize each row so X and O are valid probability distributions.
    X /= X.sum(1)[:, None]
    O /= O.sum(1)[:, None]
    # Reference value: average cross-entropy over the batch.
    Y_expected = -np.sum(X * np.log(O)) / X.shape[0]
    rtol = 1e-4
    # (removed unused `Y = np.empty_like(X)` — it was never read)
    Yhr = op.multiclass_cross_entropy(X, O)
    assert_allclose(Y_expected, Yhr, err_msg="CPU, no target", rtol=rtol)
    Xd = op.to_gpu(X)
    Od = op.to_gpu(O)
    Yd = op.multiclass_cross_entropy(Xd, Od)
    assert_allclose(Y_expected, op.to_cpu(Yd), err_msg="GPU, no target", rtol=rtol)
def test_multiclass_crossentropy():
    """Check op.multiclass_cross_entropy against a NumPy reference, on CPU and GPU.

    Builds two random row-stochastic matrices and compares the op's output
    to mean(-sum(X * log(O))) over samples.
    """
    # NOTE(review): this redefines (and shadows) the identical function
    # directly above — one of the two copies should be removed.
    X = np.random.rand(100, 10).astype(np.float32)
    O = np.random.rand(100, 10).astype(np.float32)
    # Normalize each row so X and O are valid probability distributions.
    X /= X.sum(1)[:, None]
    O /= O.sum(1)[:, None]
    # Reference value: average cross-entropy over the batch.
    Y_expected = -np.sum(X * np.log(O)) / X.shape[0]
    rtol = 1e-4
    # (removed unused `Y = np.empty_like(X)` — it was never read)
    Yhr = op.multiclass_cross_entropy(X, O)
    assert_allclose(Y_expected, Yhr, err_msg="CPU, no target", rtol=rtol)
    Xd = op.to_gpu(X)
    Od = op.to_gpu(O)
    Yd = op.multiclass_cross_entropy(Xd, Od)
    assert_allclose(Y_expected, op.to_cpu(Yd), err_msg="GPU, no target", rtol=rtol)
def _get_loss(self, target, pred):
    """Dispatch to the loss routine selected by ``self.loss``/``self.output``.

    Returns the loss value computed by the matching ``op`` routine; raises
    NotImplementedError for any unsupported loss/output combination.
    """
    # Ensure work queued on stream 0 has finished before computing the loss.
    op.streams[0].synchronize()
    if self.loss == "crossentropy":
        # Cross-entropy variant depends on the output nonlinearity.
        if self.output == 'softmax':
            return op.multiclass_cross_entropy(target, pred, stream=op.streams[3])
        if self.output == 'sigmoid':
            return op.binary_cross_entropy(target, pred, stream=op.streams[3])
        raise NotImplementedError()
    if self.loss in ("squarederror", 'mse'):
        return op.mean_squared_error(target, pred)
    raise NotImplementedError()