Example #1

import tensorflow as tf

# `base_utils` is assumed to come from the surrounding codebase; a sketch of
# its `nats_to_bits` helper follows this example.

def image_loss(self, logits, labels):
    """Cross-entropy between the logits and labels, in bits per dimension."""
    # Drop the singleton axis so logits align with the integer labels.
    logits = tf.squeeze(logits, axis=-2)
    loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels,
                                                          logits=logits)
    # Average over the batch, sum over the spatial dimensions, convert to bits.
    loss = tf.reduce_mean(loss, axis=0)
    loss = base_utils.nats_to_bits(tf.reduce_sum(loss))
    # Normalize by the pixel count of a 64x64 image.
    return loss / (64.0 * 64.0)
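The `nats_to_bits` helper used above is not part of the listing. A minimal sketch, assuming it simply rescales a nat-valued quantity to bits by dividing by ln 2 (the actual helper in the source codebase may differ):

import numpy as np

def nats_to_bits(nats):
    """Convert a quantity measured in nats to bits: divide by ln(2)."""
    return nats / np.log(2)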
Example #2
    def loss(self, targets, logits, train_config, training, aux_output=None):
        # During training the labels are a slice of the targets; at eval time
        # the full targets are used.
        if training:
            labels = targets['targets_slice']
        else:
            labels = targets['targets']

        # Per-pixel cross-entropy: mean over the batch, sum over spatial and
        # channel dimensions, then normalized to bits per dimension.
        height, width, num_channels = labels.shape[1:]
        loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels,
                                                              logits=logits)
        loss = tf.reduce_mean(loss, axis=0)
        loss = base_utils.nats_to_bits(tf.reduce_sum(loss))
        loss = loss / (height * width * num_channels)
        return loss, {}
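A minimal call sketch for this method, assuming integer labels of shape (batch, height, width, channels), logits with a trailing class axis, and a `model` instance that carries the method; the shapes and the `model` name are illustrative assumptions, not part of the listing:

import tensorflow as tf

# Illustrative shapes: batch of 2, 64x64 RGB targets, 256 intensity classes.
labels = tf.random.uniform((2, 64, 64, 3), maxval=256, dtype=tf.int32)
logits = tf.random.normal((2, 64, 64, 3, 256))
targets = {'targets': labels, 'targets_slice': labels}

# `model` is assumed to be an instance of the class this method belongs to;
# train_config is unused by this variant, so an empty dict suffices.
loss, aux = model.loss(targets, logits, train_config={}, training=False)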
Example #3
    def loss(self, targets, logits, train_config, training, aux_output=None):
        # Optionally compute the loss on a downsampled version of the targets,
        # controlled by the `downsample` / `downsample_res` config entries.
        is_downsample = train_config.get('downsample', False)
        downsample_res = train_config.get('downsample_res', 64)
        if is_downsample and training:
            labels = targets['targets_slice_%d' % downsample_res]
        elif is_downsample:
            labels = targets['targets_%d' % downsample_res]
        elif training:
            labels = targets['targets_slice']
        else:
            labels = targets['targets']

        # Cross-entropy in bits per dimension, as in the previous examples.
        height, width, num_channels = labels.shape[1:4]
        loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels,
                                                              logits=logits)
        loss = tf.reduce_mean(loss, axis=0)
        loss = base_utils.nats_to_bits(tf.reduce_sum(loss))
        loss = loss / (height * width * num_channels)
        return loss, {}
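And a sketch of the downsampling path, again with illustrative shapes and a hypothetical `model` instance:

import tensorflow as tf

# With downsampling enabled, the method looks up 'targets_<res>' keys.
train_config = {'downsample': True, 'downsample_res': 32}
labels_32 = tf.random.uniform((2, 32, 32, 3), maxval=256, dtype=tf.int32)
logits_32 = tf.random.normal((2, 32, 32, 3, 256))
targets = {'targets_32': labels_32}

# Eval-time call (training=False), so the un-sliced downsampled key is used.
loss, aux = model.loss(targets, logits_32, train_config, training=False)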