Example #1
    def backward(self, inputs, grad_outputs):
        # numpy or cupy, chosen from the types of the input arrays
        xp = cuda.get_array_module(*inputs)
        x, t = inputs
        gloss = grad_outputs[0]
        # Recompute y = sigmoid(x); dL/dx of sigmoid cross entropy is y - t.
        y, = sigmoid.Sigmoid().forward((x,))
        if self.reduce == 'mean':
            gx = xp.divide(
                gloss * self.ignore_mask * (y - t), self.count,
                dtype=y.dtype)
        else:
            gx = (gloss * self.ignore_mask * (y - t)).astype(y.dtype)
        # No gradient is propagated to the labels t.
        return gx, None
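
All of these variants lean on the same closed-form result: with y = sigmoid(x), the element-wise loss -(t * log(y) + (1 - t) * log(1 - y)) has derivative y - t with respect to x. A minimal NumPy-only sketch (not part of the examples; sigmoid and loss are our own helpers) checks that identity against a central difference:

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def loss(x, t):
    y = sigmoid(x)
    return -(t * np.log(y) + (1 - t) * np.log(1 - y))

x = np.array([0.3, -1.2, 2.0])
t = np.array([1.0, 0.0, 1.0])
eps = 1e-6
numeric = (loss(x + eps, t) - loss(x - eps, t)) / (2 * eps)
analytic = sigmoid(x) - t  # the (y - t) factor in every variant above
assert np.allclose(numeric, analytic, atol=1e-5)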
Example #2

    def forward(self, inputs):
        # Keep x and gy alive for use in this node's own backward pass.
        self.retain_inputs((0, 1))

        xp = cuda.get_array_module(*inputs)
        x, gy = inputs

        # Recompute y = sigmoid(x); the masked gradient is gy * (y - t).
        y, = sigmoid.Sigmoid().forward((x,))
        if self.reduce == 'mean':
            gx = xp.divide(gy * self.ignore_mask * (y - self.t),
                           self.count,
                           dtype=y.dtype)
        else:
            gx = (gy * self.ignore_mask * (y - self.t)).astype(y.dtype)

        return gx, None
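
A note on the shared xp idiom: get_array_module inspects its array arguments and returns either the numpy module or cupy, so the same arithmetic runs unchanged on CPU and GPU. A CPU-only sketch, guarded in case Chainer is not installed:

import numpy as np

try:
    from chainer import backend  # Chainer v5+; older code imports chainer.cuda
    xp = backend.get_array_module(np.zeros(3))
except ImportError:
    xp = np  # illustration only: fall back to plain NumPy
assert xp is np  # NumPy inputs resolve to the numpy module itself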
Example #3

    def forward(self, inputs):
        # Keep x and gy alive for use in this node's own backward pass.
        self.retain_inputs((0, 1))

        # Same idiom via the newer chainer.backend module.
        xp = backend.get_array_module(*inputs)
        x, gy = inputs

        y, = sigmoid.Sigmoid().forward((x,))
        if self.reduce == 'mean':
            # TODO(takagi): Fix to perform division in a specific dtype. See
            # cupy/cupy#1534.
            gx = xp.divide(gy * self.ignore_mask * (y - self.t),
                           self.count).astype(y.dtype)
        else:
            gx = (gy * self.ignore_mask * (y - self.t)).astype(y.dtype)

        return gx,
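
The substantive change in the third variant is the division: rather than passing dtype= to xp.divide, it divides first and casts with .astype(y.dtype). The TODO records why: at the time, CuPy's divide did not honor the dtype argument (cupy/cupy#1534), so the explicit cast keeps NumPy and CuPy behavior consistent. A NumPy-only sketch showing the two spellings agree:

import numpy as np

num = np.ones(3, dtype=np.float32)
count = 4
a = np.divide(num, count, dtype=np.float32)   # dtype-controlled division
b = np.divide(num, count).astype(np.float32)  # divide first, cast afterwards
assert a.dtype == b.dtype == np.float32
assert np.allclose(a, b)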