Example #1
def loss(logits, labels):
    """Adds all losses for the model.
    Note the final loss is not returned. Instead, the list of losses are collected
    by slim.losses. The losses are accumulated in tower_loss() and summed to
    calculate the total loss.
    Args:
      logits: List of logits from inference(). Each entry is a 2-D float Tensor.
      labels: Labels from distorted_inputs or inputs(). 1-D tensor
              of shape [batch_size]
      batch_size: integer
    """
    batch_size = FLAGS.batch_size * FLAGS.sticker_candidates
    # Reshape the labels into a dense one-hot Tensor of
    # shape [batch_size, num_classes].
    sparse_labels = tf.reshape(labels, [batch_size, 1])
    indices = tf.reshape(tf.range(batch_size), [batch_size, 1])
    concated = tf.concat(axis=1, values=[indices, sparse_labels])
    num_classes = logits[0].get_shape()[-1].value
    dense_labels = tf.sparse_to_dense(concated,
                                      [batch_size, num_classes],
                                      1.0, 0.0)

    # Cross entropy loss for the main softmax prediction.
    losses.cross_entropy_loss(logits[0],
                              dense_labels,
                              label_smoothing=0.1,
                              weight=1.0)

    # Cross entropy loss for the auxiliary softmax head.
    losses.cross_entropy_loss(logits[1],
                              dense_labels,
                              label_smoothing=0.1,
                              weight=0.4,
                              scope='aux_loss')
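
As the docstring notes, cross_entropy_loss() only registers each loss; the total is summed elsewhere. A minimal sketch of how tower_loss() might gather and sum the collected values, assuming the slim losses module exposes its graph-collection key as losses.LOSSES_COLLECTION (an assumption, adjust to the actual module), could look like this:

import tensorflow as tf

def tower_loss_sketch(scope=None):
    # Gather the per-head losses that losses.cross_entropy_loss() added to the
    # slim losses collection (collection key assumed to be
    # losses.LOSSES_COLLECTION), plus any regularization losses, and sum them.
    collected = tf.get_collection(losses.LOSSES_COLLECTION, scope)
    regularization = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
    return tf.add_n(collected + regularization, name='total_loss')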
Example #2
def loss(logits, labels, batch_size):

    # Reshape the labels into a dense one-hot Tensor of
    # shape [batch_size, num_classes].
    sparse_labels = tf.reshape(labels, [batch_size, 1])
    indices = tf.reshape(tf.range(batch_size), [batch_size, 1])
    concated = tf.concat(axis=1, values=[indices, sparse_labels])
    num_classes = logits.get_shape()[-1].value
    dense_labels = tf.sparse_to_dense(concated,
                                      [batch_size, num_classes],
                                      1.0, 0.0)

    # Cross entropy loss for the softmax prediction, without label smoothing.
    losses.cross_entropy_loss(logits, dense_labels, label_smoothing=0.0, weight=1.0)
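
The sparse_to_dense construction above simply builds a one-hot label matrix. For reference, the same [batch_size, num_classes] Tensor can be produced with the standard tf.one_hot op; this is only an illustrative sketch, not part of the original example:

import tensorflow as tf

def one_hot_labels(labels, num_classes):
    # Equivalent one-hot encoding of the 1-D integer labels: a float Tensor of
    # shape [batch_size, num_classes] with 1.0 at each label index, 0.0 elsewhere.
    return tf.one_hot(labels, depth=num_classes, on_value=1.0, off_value=0.0)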