Example #1
0
 def cross_entropy_logits(self, yhat, M):
     """Cross-entropy loss over logit scores, with optional label smoothing.

     ``yhat`` holds per-row scores whose diagonal is the positive item's
     score (via ``gpu_diag_wide``); ``M`` is the number of in-batch items.
     When ``self.smoothing`` is set, part of the target probability mass is
     spread uniformly over the other ``n_out - 1`` candidate outputs.
     """
     if not self.smoothing:
         # Plain case: average the diagonal (positive-item) scores.
         return T.cast(T.mean(gpu_diag_wide(yhat)), theano.config.floatX)
     # Total number of scored outputs: in-batch items plus extra samples.
     n_out = M + self.n_sample
     pos_weight = 1.0 - (n_out / (n_out - 1)) * self.smoothing
     neg_weight = self.smoothing / (n_out - 1)
     smoothed = (pos_weight * gpu_diag_wide(yhat) +
                 neg_weight * T.sum(yhat, axis=1))
     return T.cast(T.mean(smoothed), theano.config.floatX)
Example #2
0
 def top1(self, yhat, M):
     """TOP1 ranking loss with a sampled-score correction term.

     For each row, compares the positive (diagonal) score against every
     candidate score in ``yhat`` and regularizes the candidate scores
     toward zero; the correction subtracts the positive item's own
     contribution, scaled by the number of scored outputs.
     """
     # Positive scores as a column vector so they broadcast against yhat.
     ydiag = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     rank_term = T.mean(
         T.nnet.sigmoid(-ydiag + yhat) + T.nnet.sigmoid(yhat**2), axis=1)
     correction = T.nnet.sigmoid(ydiag**2) / (M + self.n_sample)
     return T.cast(T.mean(rank_term - correction), theano.config.floatX)
Example #3
0
 def bpr_max(self, yhat, M):
     """BPR-max loss: softmax-weighted BPR over negatives plus score regularization.

     Pairwise sigmoid terms (positive score minus each candidate score) are
     weighted by ``self.softmax_neg(yhat)`` before being summed, and the
     squared candidate scores are penalized with weight ``self.bpreg``.
     """
     softmax_scores = self.softmax_neg(yhat)
     # Positive scores as a column vector so they broadcast against yhat.
     ydiag = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     weighted = T.nnet.sigmoid(ydiag - yhat) * softmax_scores
     # 1e-24 keeps the log finite if the weighted sum collapses to zero.
     ranking = -T.log(T.sum(weighted, axis=1) + 1e-24)
     regularizer = self.bpreg * T.sum((yhat**2) * softmax_scores, axis=1)
     return T.cast(T.mean(ranking + regularizer), theano.config.floatX)
Example #4
0
 def top1_max(self, yhat, M):
     """TOP1-max loss: softmax-weighted TOP1 terms over the candidates.

     Each candidate's TOP1 term (rank sigmoid plus squared-score
     regularizer) is weighted by ``self.softmax_neg(yhat)`` and summed
     per row before averaging over the batch.
     """
     weights = self.softmax_neg(yhat)
     # Positive scores as a column vector so they broadcast against yhat.
     ydiag = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     per_item = T.nnet.sigmoid(-ydiag + yhat) + T.nnet.sigmoid(yhat**2)
     return T.cast(T.mean(T.sum(weights * per_item, axis=1)),
                   theano.config.floatX)
Example #5
0
 def bpr(self, yhat, M):
     """Plain BPR loss: mean of -log sigmoid(positive score - candidate score).

     The positive (diagonal) score is broadcast as a column against every
     candidate score in ``yhat``; ``M`` is unused here but kept so all loss
     methods share one signature.
     """
     ydiag = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     pairwise = T.nnet.sigmoid(ydiag - yhat)
     return T.cast(T.mean(-T.log(pairwise)), theano.config.floatX)