Example #1
    def lossGraph(self):
        '''
        Builds the loss graph for the given Bonsai object.
        '''
        # L2 regularization on all four Bonsai parameter matrices.
        self.regLoss = 0.5 * (self.lZ * tf.square(tf.norm(self.bonsaiObj.Z)) +
                              self.lW * tf.square(tf.norm(self.bonsaiObj.W)) +
                              self.lV * tf.square(tf.norm(self.bonsaiObj.V)) +
                              self.lT * tf.square(tf.norm(self.bonsaiObj.T)))

        if self.bonsaiObj.numClasses > 2:
            if self.useMCHLoss:
                # Multi-class hinge loss; batch_th is fed the batch size.
                self.batch_th = tf.placeholder(tf.int64, name='batch_th')
                self.marginLoss = utils.multiClassHingeLoss(
                    tf.transpose(self.score), self.Y, self.batch_th)
            else:
                # Softmax cross-entropy over the class scores.
                self.marginLoss = utils.crossEntropyLoss(
                    tf.transpose(self.score), self.Y)
        else:
            # Binary hinge loss; labels are mapped from {0, 1} to {-1, +1}.
            self.marginLoss = tf.reduce_mean(tf.nn.relu(
                1.0 - (2 * self.Y - 1) * tf.transpose(self.score)))

        self.loss = self.marginLoss + self.regLoss

        return self.loss, self.marginLoss, self.regLoss
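A minimal sketch of how the three returned ops might be driven in a TensorFlow 1.x training loop. The names `trainer`, `X_batch`, and `Y_batch`, and the `trainer.X`/`trainer.Y` placeholders, are hypothetical and not part of the example above:

import tensorflow as tf

# `trainer` is a hypothetical object exposing lossGraph() and the input
# placeholders; X_batch and Y_batch are hypothetical numpy batches.
loss, marginLoss, regLoss = trainer.lossGraph()
trainOp = tf.train.AdamOptimizer(learning_rate=0.01).minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # One gradient step; the margin and regularization terms are fetched
    # alongside the total loss for logging. With useMCHLoss and more than
    # two classes, trainer.batch_th would also need to be fed.
    _, l, m, r = sess.run([trainOp, loss, marginLoss, regLoss],
                          feed_dict={trainer.X: X_batch, trainer.Y: Y_batch})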
Example #2
    def lossGraph(self):
        '''
        Builds the loss graph for the given Bonsai object.
        '''
        # L2 regularization on all four Bonsai parameter matrices.
        self.regLoss = 0.5 * (self.lZ * tf.square(tf.norm(self.bonsaiObj.Z)) +
                              self.lW * tf.square(tf.norm(self.bonsaiObj.W)) +
                              self.lV * tf.square(tf.norm(self.bonsaiObj.V)) +
                              self.lT * tf.square(tf.norm(self.bonsaiObj.T)))

        if not self.bonsaiObj.isRegression:
            # Loss functions for classification.
            if self.bonsaiObj.numClasses > 2:
                if self.useMCHLoss:
                    # Multi-class hinge loss; batch_th is fed the batch size.
                    self.batch_th = tf.placeholder(tf.int64, name='batch_th')
                    self.marginLoss = utils.multiClassHingeLoss(
                        tf.transpose(self.score), self.Y, self.batch_th)
                else:
                    # Softmax cross-entropy over the class scores.
                    self.marginLoss = utils.crossEntropyLoss(
                        tf.transpose(self.score), self.Y)
            else:
                # Binary hinge loss; labels are mapped from {0, 1} to {-1, +1}.
                self.marginLoss = tf.reduce_mean(tf.nn.relu(
                    1.0 - (2 * self.Y - 1) * tf.transpose(self.score)))
        else:
            # Loss functions for regression.
            if self.regressionLoss == 'huber':
                # Huber loss: quadratic near zero, linear for large
                # residuals, so it is more robust to outliers than L2.
                self.marginLoss = tf.losses.huber_loss(
                    self.Y, tf.transpose(self.score))
            elif self.regressionLoss == 'l2':
                # Plain L2 (sum-of-squares) loss.
                self.marginLoss = tf.nn.l2_loss(
                    self.Y - tf.transpose(self.score))

        self.loss = self.marginLoss + self.regLoss

        return self.loss, self.marginLoss, self.regLoss
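To see why the Huber branch is more robust to outliers, here is a small self-contained TensorFlow 1.x comparison of per-element Huber and squared-error losses (tf.losses.huber_loss defaults to delta=1.0); this sketch is illustrative and not part of the Bonsai code:

import tensorflow as tf

y_true = tf.constant([[0.0], [0.0]])
y_pred = tf.constant([[0.5], [5.0]])  # one small residual, one outlier

# reduction=NONE keeps per-element losses for a direct comparison.
huber = tf.losses.huber_loss(y_true, y_pred,
                             reduction=tf.losses.Reduction.NONE)
l2 = tf.square(y_true - y_pred)

with tf.Session() as sess:
    h, l = sess.run([huber, l2])
    # Huber grows only linearly past delta: the outlier contributes
    # 4.5 under Huber versus 25.0 under squared error.
    print(h.ravel(), l.ravel())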
Example #3
    def lossGraph(self, logits, Y):
        '''
        Builds the loss graph for the given FastObj.
        '''
        # Softmax cross-entropy between the model's logits and the labels.
        lossOp = utils.crossEntropyLoss(logits, Y)
        return lossOp
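utils.crossEntropyLoss comes from the example's own codebase. A plausible TensorFlow 1.x implementation of such a helper, assuming one-hot labels (the actual utils version may differ), would be:

import tensorflow as tf

def crossEntropyLoss(logits, labels):
    # Mean softmax cross-entropy; assumes `labels` is one-hot encoded
    # with the same shape as `logits`.
    return tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits_v2(labels=labels,
                                                   logits=logits))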