Example #1
0
  def call(self,
           logits=None,
           input_length=None,
           labels=None,
           label_length=None,
           soft_labels=None,
           **kwargs):
    """Knowledge-distillation loss: a weighted mix of hard- and soft-target CE.

    Combines the usual cross entropy against the ground-truth labels with a
    temperature-scaled cross entropy against the teacher's soft labels.
    """
    # Standard cross entropy against the hard (ground-truth) labels.
    hard_loss = cross_entropy(
        logits=logits,
        input_length=input_length,
        labels=labels,
        label_length=label_length,
        smoothing=self.smoothing)
    # Soft-target term: logits are softened by the temperature T before
    # being compared to the teacher-provided soft labels.
    soft_loss = cross_entropy(
        logits=logits / self.T,
        input_length=input_length,
        labels=soft_labels,
        label_length=label_length,
        smoothing=self.smoothing)
    # Gradients produced by the soft targets scale as 1/T^2, so the soft
    # term is multiplied by T^2 to keep both terms on a comparable scale
    # when mixing hard and soft targets.
    temperature_sq = tf.square(self.T)
    return self.alpha * temperature_sq * soft_loss \
        + (1 - self.alpha) * hard_loss
    def test_cross_entropy(self):
        """Cross entropy should be ~0 when logits perfectly match labels."""
        with self.cached_session():
            # Non-sequence case: no input/label lengths supplied.
            scalar_loss = loss_utils.cross_entropy(
                logits=tf.constant(self.logits),
                input_length=None,
                labels=tf.constant(self.labels),
                label_length=None)
            self.assertAllClose(scalar_loss.eval(), 0.0,
                                rtol=1e-06, atol=1.5e-6)

            # Sequence case with Reduction.NONE: per-position losses kept,
            # so the result retains the (batch, time) shape.
            unreduced_loss = loss_utils.cross_entropy(
                logits=tf.constant(self.seq_logits),
                input_length=tf.constant(self.input_length),
                labels=tf.constant(self.seq_labels),
                label_length=tf.constant(self.label_length),
                reduction=tf.losses.Reduction.NONE)
            self.assertEqual(unreduced_loss.eval().shape, (2, 3))
            self.assertAllClose(unreduced_loss.eval(),
                                np.zeros((2, 3), dtype=np.float32),
                                rtol=1e-06,
                                atol=1.5e-6)

            # Sequence case reduced to a scalar.
            reduced_loss = loss_utils.cross_entropy(
                logits=tf.constant(self.seq_logits),
                input_length=tf.constant(self.input_length),
                labels=tf.constant(self.seq_labels),
                label_length=tf.constant(self.label_length),
                reduction=tf.losses.Reduction.SUM_BY_NONZERO_WEIGHTS)
            self.assertEqual(reduced_loss.eval().shape, ())
            self.assertAllClose(reduced_loss.eval(), 0.0,
                                rtol=1e-06, atol=1.5e-6)
Example #3
0
  def test_focal_loss(self):
    """Focal loss equals plain CE at gamma=0 and shrinks as gamma grows."""
    with self.cached_session():
      logits = np.array([[22, 23, 24]], dtype=np.float32)
      labels = np.array([2], dtype=np.int32)
      logits_t = tf.constant(logits)
      labels_t = tf.constant(labels)

      reference_ce = loss_utils.cross_entropy(
          logits=logits_t,
          input_length=None,
          labels=labels_t,
          label_length=None)

      # With gamma=0 the modulating factor is 1, so the focal loss
      # reduces exactly to the cross-entropy loss.
      focal_g0 = loss_utils.focal_loss(
          logits=logits_t, labels=labels_t, gamma=0)
      self.assertAllClose(focal_g0.eval(), 0.407606, rtol=1e-06, atol=1e-6)
      self.assertAllClose(focal_g0.eval(), reference_ce.eval(),
                          rtol=1e-07, atol=1e-7)

      # Increasing gamma down-weights this well-classified example,
      # so the loss values decrease monotonically.
      focal_g2 = loss_utils.focal_loss(
          logits=logits_t, labels=labels_t, gamma=2)
      focal_g5 = loss_utils.focal_loss(
          logits=logits_t, labels=labels_t, gamma=5)
      self.assertAllClose(focal_g2.eval(), 0.045677, rtol=1e-06, atol=1e-6)
      self.assertAllClose(focal_g5.eval(), 0.001713, rtol=1e-06, atol=1e-6)
Example #4
0
    def call(self,
             logits=None,
             input_length=None,
             labels=None,
             label_length=None,
             **kwargs):
        """Compute the cross-entropy loss for the given logits and labels.

        The label-smoothing factor comes from the layer configuration
        (``self.smoothing``); extra keyword arguments are ignored.
        """
        # Thin wrapper: delegate directly to the shared helper.
        return cross_entropy(logits=logits,
                             input_length=input_length,
                             labels=labels,
                             label_length=label_length,
                             smoothing=self.smoothing)
Example #5
0
    def call(self,
             logits=None,
             input_length=None,
             labels=None,
             label_length=None,
             soft_labels=None,
             **kwargs):
        """Compute cross-entropy loss, ignoring distillation soft labels.

        Accepts and discards ``soft_labels`` so this loss is a drop-in
        replacement for the distillation loss. ``**kwargs`` is added for
        consistency with the sibling ``call`` signatures, which tolerate
        extra keyword arguments instead of raising ``TypeError``.
        """
        del soft_labels  # hard-label loss only; soft targets are unused
        loss = cross_entropy(logits=logits,
                             input_length=input_length,
                             labels=labels,
                             label_length=label_length,
                             smoothing=self.smoothing)
        return loss