Example #1
def compute_loss(y, t):
    # softmax cross entropy
    score = chx.log_softmax(y, axis=1)
    mask = (t[:, chx.newaxis] == chx.arange(
        10, dtype=t.dtype)).astype(score.dtype)
    # TODO(beam2d): implement mean
    return -(score * mask).sum() * (1 / y.shape[0])
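A minimal usage sketch for the helper above, assuming import chainerx as chx and fabricated MNIST-shaped inputs (batch of 4, 10 classes to match the hard-coded 10); the values are illustrative only.

import chainerx as chx

y = chx.arange(40, dtype=chx.float32).reshape((4, 10))  # fake logits
t = chx.array([3, 1, 4, 1], dtype=chx.int64)            # fake integer labels
loss = compute_loss(y, t)  # scalar array: mean softmax cross entropy over the batch
print(loss)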
Example #2
def compute_loss(y, t):
    # softmax cross entropy
    score = chx.log_softmax(y, axis=1)
    mask = (t[:, chx.newaxis] == chx.arange(1000,
                                            dtype=t.dtype)).astype(score.dtype)
    # TODO(beam2d): implement mean
    return -(score * mask).sum() * (1 / y.shape[0])
Example #3
 def forward_xp(self, inputs, xp):
     x, = inputs
     axis = self.axis
     if xp is chainerx:
         return chainerx.log_softmax(x, axis=axis),
     x = x.astype(self.out_dtype)
     axis = axis if axis is not None else 1
     return x - numpy.log(numpy.exp(x).sum(axis=axis, keepdims=True)),
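A rough cross-check sketch of the two branches above with a fabricated input, assuming the default device; the NumPy reference formula and chainerx.log_softmax should agree up to floating-point error.

import numpy
import chainerx

x_np = numpy.array([[1.0, 2.0, 3.0], [0.5, 0.5, 0.5]], dtype=numpy.float32)
# NumPy fallback branch: x - log(sum(exp(x), axis=1))
ref = x_np - numpy.log(numpy.exp(x_np).sum(axis=1, keepdims=True))
# ChainerX branch
out = chainerx.log_softmax(chainerx.array(x_np), axis=1)
print(ref)
print(out)  # expected to match ref up to rounding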
Example #4
 def forward_chainerx(self, inputs):
     # TODO(niboshi): Current implementation is only intended to support
     # MNIST example.
     x, t = inputs
     num_classes = x.shape[1]
     score = chainerx.log_softmax(x, axis=1)
     mask = (t[:, chainerx.newaxis] == chainerx.arange(
         num_classes, dtype=t.dtype, device=x.device)).astype(score.dtype)
     # TODO(beam2d): implement mean
     y = -(score * mask).sum() * (1 / x.shape[0])
     return y,
Example #5
 def forward_chainerx(self, inputs):
     # TODO(niboshi): Current implementation is only intended to support
     # MNIST example.
     x, t = inputs
     num_classes = x.shape[1]
     score = chainerx.log_softmax(x, axis=1)
     mask = (t[:, chainerx.newaxis] == chainerx.arange(
         num_classes, dtype=t.dtype, device=x.device)).astype(score.dtype)
     # TODO(beam2d): implement mean
     y = -(score * mask).sum() * (1 / x.shape[0])
     return y,
Example #6
    def forward_chainerx(self, inputs):
        if self.reduce == 'mean' and self.normalize:
            x, t = inputs
            n_classes = x.shape[1]
            score = chainerx.log_softmax(x, axis=1)
            mask = (t[:, chainerx.newaxis] == chainerx.arange(
                n_classes, dtype=t.dtype, device=x.device)).astype(score.dtype)
            y = (score * mask).sum() * (-1 / mask.sum())
            return y,

        x, t = inputs
        y = chainerx.softmax_cross_entropy(x, t)
        if self.reduce == 'mean':
            return y.mean(),
        return y,
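A small sketch of the fallback path above, with made-up shapes: chainerx.softmax_cross_entropy returns unreduced per-sample losses, and the 'mean' reduction is applied afterwards.

import chainerx

x = chainerx.arange(20, dtype=chainerx.float32).reshape((4, 5))  # fake logits
t = chainerx.array([0, 1, 2, 3], dtype=chainerx.int64)           # fake labels
per_sample = chainerx.softmax_cross_entropy(x, t)  # shape (4,): one loss per sample
mean_loss = per_sample.mean()                      # scalar, as in reduce == 'mean'
print(per_sample, mean_loss)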
Example #7
def test_log_softmax_invalid(device, a_shape, axis, dtype):
    a = array_utils.create_dummy_ndarray(chainerx, a_shape, dtype)
    with pytest.raises(chainerx.DimensionError):
        return chainerx.log_softmax(a, axis=axis)
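For illustration, a hedged standalone version of what the test above checks, with an assumed 2-D input: passing an out-of-range axis to chainerx.log_softmax is expected to raise chainerx.DimensionError.

import chainerx
import pytest

a = chainerx.arange(6, dtype=chainerx.float32).reshape((2, 3))
with pytest.raises(chainerx.DimensionError):
    chainerx.log_softmax(a, axis=2)  # axis 2 is out of range for a 2-D array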
Example #8
 def forward_chainerx(self, xs):
     return chainerx.log_softmax(xs[0], axis=self.axis),
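A quick sanity sketch for the thin wrapper above, using fabricated values: exponentiating the output of chainerx.log_softmax should give rows that sum to one.

import chainerx

x = chainerx.array([[1.0, 2.0, 3.0], [0.0, 0.0, 0.0]], dtype=chainerx.float32)
probs = chainerx.exp(chainerx.log_softmax(x, axis=1))
print(probs.sum(axis=1))  # approximately [1., 1.]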
Example #9
def compute_loss(y, t):
    # softmax cross entropy, averaged over the batch
    score = chx.log_softmax(y, axis=1)
    mask = (t[:, chx.newaxis] == chx.arange(10, dtype=t.dtype)).astype(score.dtype)
    return -(score * mask).sum() * (1 / y.shape[0])
Example #10
def test_log_softmax_invalid(device, a_shape, axis, float_dtype):
    a = array_utils.create_dummy_ndarray(chainerx, a_shape, float_dtype)
    with pytest.raises(chainerx.DimensionError):
        return chainerx.log_softmax(a, axis=axis)
Example #11
 def forward_chainerx(self, xs):
     return chainerx.log_softmax(xs[0], axis=self.axis),