Example #1
 def cross_entropy_logits(self, yhat, M):
     # Cross-entropy on logit-style scores: the target item's score for
     # example i sits at yhat[i, i]; each row covers the M in-batch items
     # plus self.n_sample extra sampled negatives.
     if self.smoothing:
         # Label smoothing: shrink the weight on the target score and spread
         # self.smoothing uniformly over the remaining n_out - 1 outputs.
         n_out = M + self.n_sample
         return T.cast(
             T.sum((1.0 - (n_out /
                           (n_out - 1)) * self.smoothing) * gpu_diag(yhat) +
                   (self.smoothing / (n_out - 1)) * T.sum(yhat, axis=1)),
             theano.config.floatX)
     else:
         # No smoothing: the loss is simply the sum of the target scores.
         return T.cast(T.sum(gpu_diag(yhat)), theano.config.floatX)
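All five examples appear to be methods of a Theano-based model class. They rely on `import theano`, `import theano.tensor as T`, and a `gpu_diag` helper that is not shown: it pulls each row's own (target) score off the diagonal of the score matrix `yhat`, apparently of shape (M, M + self.n_sample). A minimal CPU stand-in under that assumption (the original is presumably a custom GPU op):

 import theano
 import theano.tensor as T

 def gpu_diag(X, keepdims=False):
     # Stand-in for the missing helper: pick X[i, i] for every row i,
     # i.e. the score of row i's own target item.
     d = X[T.arange(X.shape[0]), T.arange(X.shape[0])]
     # keepdims=True returns a column vector that broadcasts against X.
     return d.dimshuffle(0, 'x') if keepdims else d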
Example #2
 def top1(self, yhat, M):
     # TOP1 loss: average sigmoid(r_neg - r_pos) plus the score regularizer
     # sigmoid(r_neg^2) over all M + self.n_sample outputs of each row, then
     # subtract the target item's own regularizer term that the mean included.
     ydiag = gpu_diag(yhat, keepdims=True)  # target scores as a column vector
     return T.cast(
         T.sum(
             T.mean(T.nnet.sigmoid(-ydiag + yhat) + T.nnet.sigmoid(yhat**2),
                    axis=1) - T.nnet.sigmoid(ydiag**2) /
             (M + self.n_sample)), theano.config.floatX)
Example #3
 def bpr_max(self, yhat, M):
     # BPR-max loss: weight the pairwise sigmoid(r_pos - r_neg) terms by a
     # softmax over the negative scores (1e-24 guards against log(0)), plus
     # a self.bpreg-scaled penalty on the softmax-weighted squared scores.
     softmax_scores = self.softmax_neg(yhat)
     return T.cast(
         T.sum(-T.log(
             T.sum(T.nnet.sigmoid(gpu_diag(yhat, keepdims=True) - yhat) *
                   softmax_scores,
                   axis=1) + 1e-24) +
               self.bpreg * T.sum((yhat**2) * softmax_scores, axis=1)),
         theano.config.floatX)
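Examples #3 and #4 also call a `self.softmax_neg` method that is not included in the listing. A plausible sketch, assuming it computes a row-wise softmax over the negative scores with the target (diagonal) entry masked out:

 def softmax_neg(self, X):
     # Assumed behaviour: row-wise softmax with the diagonal (target item)
     # zeroed out, so the resulting weights cover only the negatives.
     mask = 1.0 - T.eye(X.shape[0], X.shape[1])
     X = X * mask
     e_x = T.exp(X - X.max(axis=1, keepdims=True)) * mask
     return e_x / e_x.sum(axis=1, keepdims=True)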
Example #4
 def top1_max(self, yhat, M):
     # TOP1-max loss: the TOP1 terms sigmoid(r_neg - r_pos) + sigmoid(r_neg^2)
     # are weighted by a softmax over the negative scores instead of being
     # averaged uniformly.
     softmax_scores = self.softmax_neg(yhat)
     y = softmax_scores * (T.nnet.sigmoid(-gpu_diag(yhat, keepdims=True) +
                                          yhat) + T.nnet.sigmoid(yhat**2))
     return T.cast(T.sum(T.sum(y, axis=1)), theano.config.floatX)
Example #5
 def bpr(self, yhat, M):
     # Plain BPR loss: -log sigmoid(r_pos - r_neg), summed over every
     # (example, item) pair of the score matrix.
     return T.cast(
         T.sum(-T.log(T.nnet.sigmoid(gpu_diag(yhat, keepdims=True) -
                                     yhat))), theano.config.floatX)
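To sanity-check one of these losses outside the full model, the method can be attached to a small dummy object and compiled with `theano.function`. Below is a minimal, self-contained sketch for the BPR loss of Example #5; the class and variable names are illustrative, not from the source, and `gpu_diag` is the same stand-in as in the earlier sketch:

 import numpy as np
 import theano
 import theano.tensor as T

 def gpu_diag(X, keepdims=False):
     # Same stand-in as above: row i's target score X[i, i].
     d = X[T.arange(X.shape[0]), T.arange(X.shape[0])]
     return d.dimshuffle(0, 'x') if keepdims else d

 class BPRDemo(object):
     def bpr(self, yhat, M):
         # Same expression as Example #5.
         return T.cast(
             T.sum(-T.log(T.nnet.sigmoid(gpu_diag(yhat, keepdims=True) -
                                         yhat))), theano.config.floatX)

 yhat = T.matrix('yhat')                    # symbolic (batch, items) score matrix
 loss = BPRDemo().bpr(yhat, yhat.shape[0])  # build the symbolic loss node
 f = theano.function([yhat], loss)          # compile to a callable
 scores = np.random.randn(4, 4).astype(theano.config.floatX)
 print(f(scores))                           # scalar BPR loss for a 4x4 score matrix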