Example #1
0
 def top1(self, yhat):
     """TOP1 ranking loss (TensorFlow).

     `yhat` holds pairwise scores; gpu_diag_wide extracts the positive-item
     scores from its diagonal. The loss pushes each positive score above the
     negatives and regularizes negative scores toward zero; the final sigmoid
     term (scaled by batch size + sample count) discounts the positive item's
     own contribution.
     """
     pos_scores = gpu_diag_wide(yhat)
     scores_t = tf.transpose(yhat)
     # Rank term: sigmoid(negative - positive) plus score regularizer,
     # averaged over the batch axis of the transposed score matrix.
     rank_term = tf.reduce_mean(
         tf.nn.sigmoid(-pos_scores + scores_t) + tf.nn.sigmoid(scores_t**2),
         axis=0)
     # Correction for the positive item counted inside the mean above.
     correction = tf.nn.sigmoid(pos_scores**2) / (self.batch_size +
                                                  self.n_samples)
     return tf.reduce_mean(rank_term - correction)
Example #2
0
 def cross_entropy_logits(self, yhat, M):
     """Cross-entropy on logits with optional label smoothing (Theano).

     `M` is the minibatch size; the positive score sits on the diagonal of
     `yhat` (via gpu_diag_wide). With smoothing, probability mass
     `self.smoothing` is spread uniformly over the other `n_out - 1` outputs.
     """
     pos_logits = gpu_diag_wide(yhat)
     if not self.smoothing:
         # Plain case: mean positive logit.
         return T.cast(T.mean(pos_logits), theano.config.floatX)
     n_out = M + self.n_sample
     pos_weight = 1.0 - (n_out / (n_out - 1)) * self.smoothing
     neg_weight = self.smoothing / (n_out - 1)
     smoothed = pos_weight * pos_logits + neg_weight * T.sum(yhat, axis=1)
     return T.cast(T.mean(smoothed), theano.config.floatX)
Example #3
0
 def top1(self, yhat, M):
     """TOP1 ranking loss (Theano).

     The positive score is the diagonal of `yhat`, reshaped to a column so it
     broadcasts against every negative score in the same row. The last term
     subtracts the positive item's own contribution, scaled by the total
     number of outputs (M + self.n_sample).
     """
     pos_col = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     # Per-row mean of sigmoid(neg - pos) plus the score regularizer.
     rank_term = T.mean(
         T.nnet.sigmoid(-pos_col + yhat) + T.nnet.sigmoid(yhat**2), axis=1)
     correction = T.nnet.sigmoid(pos_col**2) / (M + self.n_sample)
     # NOTE(review): rank_term is (B,) while correction is (B,1); the
     # subtraction broadcasts exactly as in the reference implementation.
     return T.cast(T.mean(rank_term - correction), theano.config.floatX)
Example #4
0
    def bpr_max(self, yhat):
        """BPR-max loss (TensorFlow).

        Softmax-weighted BPR over the negative samples plus an L2-style
        regularizer on the negative scores, weighted by the same softmax
        scores and scaled by self.bpr_max_lambda.
        """
        neg_weights = self.softmax_neg(yhat)
        # Column of positive scores, broadcast against every negative score.
        pos_col = tf.expand_dims(gpu_diag_wide(yhat), 1)
        weighted_bpr = tf.reduce_sum(
            tf.sigmoid(pos_col - yhat) * neg_weights, axis=1)
        # 1e-24 guards the log against an exactly-zero weighted sum.
        log_term = -tf.log(weighted_bpr + 1e-24)
        reg_term = self.bpr_max_lambda * tf.reduce_sum(
            (yhat ** 2) * neg_weights, axis=1)
        return tf.reduce_mean(log_term + reg_term)
Example #5
0
 def bpr_max(self, yhat, M):
     """BPR-max loss (Theano).

     Softmax-weighted BPR over negatives plus a score regularizer weighted
     by the same softmax scores and scaled by self.bpreg. `M` is unused here
     but kept for a uniform loss-function signature.
     """
     neg_weights = self.softmax_neg(yhat)
     pos_col = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     weighted_bpr = T.sum(
         T.nnet.sigmoid(pos_col - yhat) * neg_weights, axis=1)
     reg_term = self.bpreg * T.sum((yhat**2) * neg_weights, axis=1)
     # 1e-24 keeps the log finite when the weighted sum underflows to zero.
     loss = T.mean(-T.log(weighted_bpr + 1e-24) + reg_term)
     return T.cast(loss, theano.config.floatX)
Example #6
0
 def top1_max(self, yhat):
     """TOP1-max loss (TensorFlow).

     TOP1 terms computed against every negative, weighted by the softmax of
     the negative scores so the hardest negatives dominate.
     """
     neg_weights = self.softmax_neg(yhat)
     pos_col = tf.expand_dims(gpu_diag_wide(yhat), 1)
     per_item = tf.sigmoid(-pos_col + yhat) + tf.sigmoid(yhat**2)
     return tf.reduce_mean(tf.reduce_sum(neg_weights * per_item, axis=1))
Example #7
0
 def bpr(self, yhat):
     """BPR loss (TensorFlow): mean -log sigmoid(positive - negative).

     The positive score (diagonal of `yhat`) broadcasts against the
     transposed score matrix, pairing it with every negative.
     """
     pos_scores = gpu_diag_wide(yhat)
     pairwise = tf.nn.sigmoid(pos_scores - tf.transpose(yhat))
     return tf.reduce_mean(-tf.log(pairwise))
Example #8
0
 def cross_entropy(self, yhat):
     """Cross-entropy loss (TensorFlow) on the positive-item probabilities.

     The diagonal of `yhat` holds the positive scores; 1e-24 keeps the log
     finite when a score underflows to zero.
     """
     pos_scores = gpu_diag_wide(yhat)
     return tf.reduce_mean(-tf.log(pos_scores + 1e-24))
Example #9
0
 def top1_max(self, yhat, M):
     """TOP1-max loss (Theano): softmax-weighted TOP1 over the negatives.

     `M` is unused; kept so all loss functions share one signature.
     """
     neg_weights = self.softmax_neg(yhat)
     pos_col = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     per_item = T.nnet.sigmoid(-pos_col + yhat) + T.nnet.sigmoid(yhat**2)
     weighted = neg_weights * per_item
     return T.cast(T.mean(T.sum(weighted, axis=1)), theano.config.floatX)
Example #10
0
 def bpr(self, yhat, M):
     """BPR loss (Theano): mean -log sigmoid(positive - negative).

     The diagonal (positive scores), reshaped to a column, broadcasts
     against the whole score matrix. `M` is unused; kept for a uniform
     loss-function signature.
     """
     pos_col = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     loss = T.mean(-T.log(T.nnet.sigmoid(pos_col - yhat)))
     return T.cast(loss, theano.config.floatX)
Example #11
0
 def top1_max(self, yhat, M):
     """TOP1-max loss (Theano).

     Each negative's TOP1 term is weighted by the softmax of the negative
     scores, focusing the loss on the highest-scoring negatives. `M` is
     unused; kept for signature uniformity.
     """
     weights = self.softmax_neg(yhat)
     diag_col = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     terms = T.nnet.sigmoid(-diag_col + yhat) + T.nnet.sigmoid(yhat**2)
     return T.cast(T.mean(T.sum(weights * terms, axis=1)),
                   theano.config.floatX)
Example #12
0
 def top1(self, yhat, M):
     """TOP1 ranking loss (Theano).

     Diagonal of `yhat` = positive scores, reshaped to a column so they
     broadcast against the negatives row-wise. The sigmoid(diag^2) term,
     scaled by the output count, removes the positive item's own
     contribution from the row mean.
     """
     diag_col = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     row_mean = T.mean(
         T.nnet.sigmoid(-diag_col + yhat) + T.nnet.sigmoid(yhat**2), axis=1)
     correction = T.nnet.sigmoid(diag_col**2) / (M + self.n_sample)
     # NOTE(review): row_mean is (B,), correction is (B,1) — the broadcast
     # matches the reference implementation exactly.
     return T.cast(T.mean(row_mean - correction), theano.config.floatX)
Example #13
0
 def bpr_max(self, yhat, M):
     """BPR-max loss (Theano) with softmax-weighted negatives.

     Adds a self.bpreg-scaled score regularizer, weighted by the same
     softmax scores. `M` is unused; kept for signature uniformity.
     """
     weights = self.softmax_neg(yhat)
     diag_col = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     soft_bpr = T.sum(T.nnet.sigmoid(diag_col - yhat) * weights, axis=1)
     # 1e-24 guards against log(0) when the weighted sum underflows.
     nll = -T.log(soft_bpr + 1e-24)
     penalty = self.bpreg * T.sum((yhat**2) * weights, axis=1)
     return T.cast(T.mean(nll + penalty), theano.config.floatX)
Example #14
0
 def bpr(self, yhat, M):
     """BPR loss (Theano): mean of -log sigmoid(pos - neg) over all pairs.

     `M` is unused; kept so every loss shares the same signature.
     """
     diag_col = gpu_diag_wide(yhat).dimshuffle((0, 'x'))
     pairwise = T.nnet.sigmoid(diag_col - yhat)
     return T.cast(T.mean(-T.log(pairwise)), theano.config.floatX)
Example #15
0
 def cross_entropy_logits(self, yhat, M):
     """Cross-entropy on logits with optional label smoothing (Theano).

     Without smoothing: mean of the positive logits (the diagonal of
     `yhat`). With smoothing, mass `self.smoothing` is spread uniformly
     across the remaining `M + self.n_sample - 1` outputs.
     """
     diag = gpu_diag_wide(yhat)
     if not self.smoothing:
         return T.cast(T.mean(diag), theano.config.floatX)
     n_out = M + self.n_sample
     target_w = 1.0 - (n_out / (n_out - 1)) * self.smoothing
     other_w = self.smoothing / (n_out - 1)
     value = T.mean(target_w * diag + other_w * T.sum(yhat, axis=1))
     return T.cast(value, theano.config.floatX)