Example #1
0
 def forward(self, x, t):
     """Mean softmax cross-entropy loss of scores `x` against targets `t`."""
     batch_size = x.shape[0]
     # Log-softmax: subtract the per-row log partition function.
     log_prob = x - utils.logsumexp(x, axis=1)
     # Select each sample's log-probability at its target class index.
     picked = log_prob[np.arange(batch_size), t.ravel()]
     return -picked.sum() / np.float32(batch_size)
Example #2
0
 def forward(self, x, t):
     """Mean softmax cross-entropy loss; device-agnostic (NumPy or CuPy).

     The array module is chosen from `t.data` so the same code runs on
     CPU and GPU arrays.
     """
     xp = cuda.get_array_module(t.data)
     batch_size = x.shape[0]
     # Log-softmax via the log-sum-exp trick along the class axis.
     log_prob = x - utils.logsumexp(x, axis=1)
     # Gather the log-probability of the correct class for every sample.
     target_log_prob = log_prob[xp.arange(batch_size), t.ravel()]
     return -target_log_prob.sum() / xp.float32(batch_size)
 def forward(self, x, t):
     """Softmax cross-entropy averaged over the mini-batch (NumPy)."""
     n = x.shape[0]
     log_softmax = x - utils.logsumexp(x, axis=1)
     # t.ravel() flattens multi-dimensional target labels to one dimension
     # so they can be used for per-row fancy indexing.
     selected = log_softmax[np.arange(n), t.ravel()]
     loss = -selected.sum() / np.float32(n)
     return loss
 def forward(self, x):
     """Return the log-softmax of `x` computed along `self.axis`."""
     return x - utils.logsumexp(x, self.axis)