def test_crossentropy():
    """Check op.cross_entropy against a NumPy reference, on CPU and GPU.

    Fix: removed the dead local ``Y = np.empty_like(X)`` — it was never
    written to or read after creation.
    """
    # NOTE(review): a second, near-identical test_crossentropy is defined
    # later in this file; under pytest the later definition shadows this
    # one, so only one of them actually runs — consider deduplicating.
    X = np.random.rand(100, 10).astype(np.float32)
    O = np.random.rand(100, 10).astype(np.float32)
    # Row-normalize so each row of X and O is a valid probability
    # distribution (log(O) stays finite since rand() is in (0, 1)).
    X /= X.sum(1)[:, None]
    O /= O.sum(1)[:, None]
    # Reference value: mean over samples of -sum_j x_ij * log(o_ij).
    Y_expected = -np.sum(X * np.log(O)) / X.shape[0]
    rtol = 1e-4

    # CPU path.
    Yhr = op.cross_entropy(X, O)
    assert_allclose(Y_expected, Yhr, err_msg="CPU, no target", rtol=rtol)

    # GPU path: upload, compute, download, compare.
    Xd = op.to_gpu(X)
    Od = op.to_gpu(O)
    Yd = op.cross_entropy(Xd, Od)
    assert_allclose(Y_expected, op.to_cpu(Yd), err_msg="GPU, no target", rtol=rtol)
def _get_loss(self, target, pred):
    """Return the configured loss value between *target* and *pred*.

    Dispatches on ``self.loss``; raises NotImplementedError for any
    loss name other than "crossentropy" or "squarederror".
    """
    loss_kind = self.loss
    if loss_kind == "squarederror":
        return op.mean_squared_error(target, pred)
    if loss_kind == "crossentropy":
        # Make sure work queued on stream 0 has finished before launching
        # the cross-entropy kernel on stream 3.
        op.streams[0].synchronize()
        return op.cross_entropy(target, pred, stream=op.streams[3])
    raise NotImplementedError()
def test_crossentropy():
    """Check op.cross_entropy against a NumPy reference, on CPU and GPU.

    Fix: removed the dead local ``Y = np.empty_like(X)`` — it was never
    written to or read after creation.
    """
    # NOTE(review): this duplicates an earlier test_crossentropy in this
    # file; this later definition shadows the earlier one under pytest —
    # consider deduplicating.
    X = np.random.rand(100, 10).astype(np.float32)
    O = np.random.rand(100, 10).astype(np.float32)
    # Row-normalize so each row of X and O is a valid probability
    # distribution (log(O) stays finite since rand() is in (0, 1)).
    X /= X.sum(1)[:, None]
    O /= O.sum(1)[:, None]
    # Reference value: mean over samples of -sum_j x_ij * log(o_ij).
    Y_expected = -np.sum(X * np.log(O)) / X.shape[0]
    rtol = 1e-4

    # CPU path.
    Yhr = op.cross_entropy(X, O)
    assert_allclose(Y_expected, Yhr, err_msg="CPU, no target", rtol=rtol)

    # GPU path: upload, compute, download, compare.
    Xd = op.to_gpu(X)
    Od = op.to_gpu(O)
    Yd = op.cross_entropy(Xd, Od)
    assert_allclose(Y_expected, op.to_cpu(Yd), err_msg="GPU, no target", rtol=rtol)
def cross_entropy_loss(w, x, y, net):
    """Load weight vector *w* into *net*, forward *x*, and return the
    cross-entropy between targets *y* and the network's predictions.

    Side effect: mutates *net* in place via unserialize(w, net).
    """
    unserialize(w, net)
    prediction = net.forward_pass(x)
    return op.cross_entropy(y, prediction)