Example #1
0
def _build_classification_loss(loss_config):
    """Construct the classification loss object selected by *loss_config*.

    Args:
      loss_config: A losses_pb2.ClassificationLoss object.

    Returns:
      Loss based on the config.

    Raises:
      ValueError: On invalid loss_config.
    """
    if not isinstance(loss_config, losses_pb2.ClassificationLoss):
        raise ValueError('loss_config not of type losses_pb2.ClassificationLoss.')

    # Exactly one oneof field is set on a valid config; None if empty.
    which = loss_config.WhichOneof('classification_loss')

    if which == 'weighted_sigmoid':
        sigmoid_config = loss_config.weighted_sigmoid
        return losses.WeightedSigmoidClassificationLoss(
            anchorwise_output=sigmoid_config.anchorwise_output)
    elif which == 'weighted_softmax':
        softmax_config = loss_config.weighted_softmax
        return losses.WeightedSoftmaxClassificationLoss(
            anchorwise_output=softmax_config.anchorwise_output)
    elif which == 'bootstrapped_sigmoid':
        bootstrap_config = loss_config.bootstrapped_sigmoid
        bootstrap_type = 'hard' if bootstrap_config.hard_bootstrap else 'soft'
        return losses.BootstrappedSigmoidClassificationLoss(
            alpha=bootstrap_config.alpha,
            bootstrap_type=bootstrap_type,
            anchorwise_output=bootstrap_config.anchorwise_output)
    else:
        raise ValueError('Empty loss config.')
Example #2
0
 def testReturnsCorrectAnchorWiseLoss(self):
   """Hard-bootstrapped sigmoid loss summed over classes, per anchor."""
   logits = tf.constant(
       [[[-100, 100, -100], [100, -100, -100], [100, 0, -100],
         [-100, -100, 100]],
        [[-100, 0, 100], [-100, 100, -100], [100, 100, 100],
         [0, 0, -1]]], tf.float32)
   labels = tf.constant(
       [[[0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1]],
        [[0, 0, 1], [0, 1, 0], [1, 1, 1], [1, 0, 0]]], tf.float32)
   # Last anchor of the second image is masked out by a zero weight.
   anchor_weights = tf.constant([[1, 1, 1, 1], [1, 1, 1, 0]], tf.float32)
   bootstrap_alpha = tf.constant(.5, tf.float32)
   loss_fn = losses.BootstrappedSigmoidClassificationLoss(
       bootstrap_alpha, bootstrap_type='hard')
   per_anchor_loss = tf.reduce_sum(
       loss_fn(logits, labels, weights=anchor_weights), axis=2)
   # Only the two anchors with a logit of 0 on the target class contribute
   # -log(.5); all saturated logits give ~0 loss.
   expected = np.matrix([[0, 0, -math.log(.5), 0],
                         [-math.log(.5), 0, 0, 0]])
   with self.test_session() as sess:
     self.assertAllClose(sess.run(per_anchor_loss), expected)
Example #3
0
def _build_classification_loss(loss_config):
    """Builds a classification loss based on the loss config.

    Args:
      loss_config: A losses_pb2.ClassificationLoss object.

    Returns:
      Loss based on the config.

    Raises:
      ValueError: On invalid loss_config.
    """
    if not isinstance(loss_config, losses_pb2.ClassificationLoss):
        raise ValueError(
            'loss_config not of type losses_pb2.ClassificationLoss.')

    loss_type = loss_config.WhichOneof('classification_loss')

    if loss_type == 'weighted_sigmoid':
        return losses.WeightedSigmoidClassificationLoss()

    if loss_type == 'weighted_sigmoid_focal':
        config = loss_config.weighted_sigmoid_focal
        # A non-positive alpha disables the class-balancing term (None).
        alpha = config.alpha if config.alpha > 0 else None
        return losses.SigmoidFocalClassificationLoss(gamma=config.gamma,
                                                     alpha=alpha)

    if loss_type == 'weighted_softmax_focal':
        config = loss_config.weighted_softmax_focal
        # Same convention as the sigmoid focal branch above.
        alpha = config.alpha if config.alpha > 0 else None
        return losses.SoftmaxFocalClassificationLoss(gamma=config.gamma,
                                                     alpha=alpha)

    if loss_type == 'weighted_softmax':
        config = loss_config.weighted_softmax
        return losses.WeightedSoftmaxClassificationLoss(
            logit_scale=config.logit_scale)

    if loss_type == 'bootstrapped_sigmoid':
        config = loss_config.bootstrapped_sigmoid
        return losses.BootstrappedSigmoidClassificationLoss(
            alpha=config.alpha,
            bootstrap_type=('hard' if config.hard_bootstrap else 'soft'))

    raise ValueError('Empty loss config.')
Example #4
0
def _build_classification_loss(loss_config):
    """Builds a classification loss based on the loss config.

    Args:
      loss_config: A yaml.ClassificationLoss object.

    Returns:
      Loss based on the config.

    Raises:
      ValueError: On invalid loss_config.
    """
    if 'weighted_sigmoid' in loss_config:
        return losses.WeightedSigmoidClassificationLoss()

    if 'weighted_sigmoid_focal' in loss_config:
        config = loss_config.weighted_sigmoid_focal
        # A non-positive alpha disables the class-balancing term (None).
        alpha = config.alpha if config.alpha > 0 else None
        return losses.SigmoidFocalClassificationLoss(gamma=config.gamma,
                                                     alpha=alpha)

    if 'weighted_softmax_focal' in loss_config:
        config = loss_config.weighted_softmax_focal
        # Same convention as the sigmoid focal branch above.
        alpha = config.alpha if config.alpha > 0 else None
        return losses.SoftmaxFocalClassificationLoss(gamma=config.gamma,
                                                     alpha=alpha)

    if 'weighted_softmax' in loss_config:
        config = loss_config.weighted_softmax
        return losses.WeightedSoftmaxClassificationLoss(
            logit_scale=config.logit_scale)

    if 'bootstrapped_sigmoid' in loss_config:
        config = loss_config.bootstrapped_sigmoid
        return losses.BootstrappedSigmoidClassificationLoss(
            alpha=config.alpha,
            bootstrap_type=('hard' if config.hard_bootstrap else 'soft'))

    # No recognized key was present in the config.
    raise ValueError('Empty loss config.')
Example #5
0
 def testReturnsCorrectLossSoftBootstrapping(self):
     """Soft-bootstrapped sigmoid loss reduced to a single scalar."""
     logits = tf.constant([[[-100, 100, 0],
                            [100, -100, -100],
                            [100, -100, -100],
                            [-100, -100, 100]],
                           [[-100, -100, 100],
                            [-100, 100, -100],
                            [100, 100, 100],
                            [0, 0, -1]]], tf.float32)
     labels = tf.constant([[[0, 1, 0],
                            [1, 0, 0],
                            [1, 0, 0],
                            [0, 0, 1]],
                           [[0, 0, 1],
                            [0, 1, 0],
                            [1, 1, 1],
                            [1, 0, 0]]], tf.float32)
     # Last anchor of the second image is masked out by a zero weight.
     anchor_weights = tf.constant([[1, 1, 1, 1], [1, 1, 1, 0]], tf.float32)
     loss_fn = losses.BootstrappedSigmoidClassificationLoss(
         tf.constant(.5, tf.float32), bootstrap_type='soft')
     total_loss = loss_fn(logits, labels, weights=anchor_weights)
     with self.test_session() as sess:
         # Only the single zero logit on a target class contributes -log(.5).
         self.assertAllClose(sess.run(total_loss), -math.log(.5))