Example 1
def sparse_softmax_cross_entropy(
    labels,
    logits,
    weights=1.,
    scope=None,
    loss_collection=ops.GraphKeys.LOSSES,
    reduction=Reduction.SUM_BY_NONZERO_WEIGHTS,
):
    if labels is None:
        raise ValueError("labels must not be None.")
    if logits is None:
        raise ValueError("logits must not be None.")
    # Map the TensorFlow-style Reduction enum onto the op's
    # normalization modes.
    normalization = None
    if reduction == Reduction.NONE:
        normalization = 'UNIT'
    elif reduction == Reduction.MEAN:
        normalization = 'FULL'
    elif reduction in (Reduction.SUM_BY_NONZERO_WEIGHTS,
                       Reduction.SUM_OVER_NONZERO_WEIGHTS):
        normalization = 'NONE'
    elif reduction == Reduction.SUM_OVER_BATCH_SIZE:
        normalization = 'BATCH_SIZE'
    loss = _ops.SparseSoftmaxCrossEntropy(
        [logits, labels],
        normalization=normalization,
        name=scope,
    )
    # Apply the scalar weight outside the op, then register the loss
    # in the requested collection.
    if weights != 1.0:
        loss = weights * loss
    ops.add_to_collection(loss_collection, loss)
    return loss
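For reference, here is how the four normalization modes line up with the arithmetic. This is a minimal NumPy sketch illustrating the math, not Dragon's actual kernel; how the *_NONZERO_WEIGHTS reductions finish their division is not visible in the snippet above, so that part is an assumption:

import numpy as np

def reference_sparse_softmax_ce(logits, labels, weights=1.0,
                                normalization='NONE'):
    # Numerically stable log-softmax over the class axis.
    shifted = logits - logits.max(axis=1, keepdims=True)
    log_probs = shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))
    # Negative log-probability of each example's target class.
    per_example = -log_probs[np.arange(len(labels)), labels]
    w = np.broadcast_to(np.asarray(weights, dtype=float), per_example.shape)
    weighted = per_example * w
    if normalization == 'UNIT':        # Reduction.NONE: one value per example
        return weighted
    if normalization == 'FULL':        # Reduction.MEAN: divide by total weight
        return weighted.sum() / w.sum()
    if normalization == 'BATCH_SIZE':  # Reduction.SUM_OVER_BATCH_SIZE
        return weighted.sum() / len(labels)
    # 'NONE': plain sum; the *_NONZERO_WEIGHTS reductions presumably divide
    # by the nonzero-weight count downstream (assumption, not shown above).
    return weighted.sum()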
Example 2
def sparse_softmax_cross_entropy_with_logits(logits,
                                             labels,
                                             dim=-1,
                                             name=None):
    # Remap the last-axis default onto axis 1, presumably the channel
    # axis of the expected data layout.
    if dim == -1:
        dim = 1
    return ops.SparseSoftmaxCrossEntropy([logits, labels],
                                         axis=dim,
                                         normalization='UNIT',
                                         name=name)
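The dim remapping suggests that axis 1 is the class axis for multi-dimensional inputs, e.g. an NCHW feature map; that layout is an assumption, not stated in the snippet. A small NumPy sketch of what per-position cross-entropy along axis 1 computes:

import numpy as np

# Hypothetical dense-prediction shapes: logits [N, C, H, W], labels [N, H, W].
N, C, H, W = 2, 5, 4, 4
logits = np.random.randn(N, C, H, W)
labels = np.random.randint(0, C, size=(N, H, W))

# Log-softmax along the class axis (axis=1), matching the remapped dim.
shifted = logits - logits.max(axis=1, keepdims=True)
log_probs = shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))

# 'UNIT' normalization: one loss per spatial position, shape [N, H, W].
n, h, w = np.ogrid[:N, :H, :W]
per_position = -log_probs[n, labels, h, w]
print(per_position.shape)  # (2, 4, 4)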
Example 3
def sparse_softmax_cross_entropy_with_logits(
    _sentinel=None,  # rejects positional calls, TensorFlow-style
    labels=None,
    logits=None,
    dim=-1,
    name=None,
):
    return _ops.SparseSoftmaxCrossEntropy(
        [logits, labels],
        axis=dim,
        normalization='UNIT',
        name=name,
    )
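The leading _sentinel=None follows TensorFlow's convention for forcing keyword arguments: any positional call fills _sentinel first, which a guard can then reject. The snippet above omits the guard; a typical one looks like this self-contained demo (names and message text are illustrative):

def _demo(_sentinel=None, labels=None, logits=None):
    # Reject positional calls: anything positional lands in _sentinel.
    if _sentinel is not None:
        raise ValueError("Only call this function with named arguments.")
    return labels, logits

_demo(labels=[0], logits=[[1., 2.]])   # fine
# _demo([0], [[1., 2.]])               # would raise ValueError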
Example 4
def Setup(self, bottom):
    super(SoftmaxWithLossLayer, self).Setup(bottom)
    loss = ops.SparseSoftmaxCrossEntropy(bottom, **self._param)
    # Scale by the optional Caffe-style loss_weight.
    if self._loss_weight is not None:
        loss *= self._loss_weight
    return loss
Example 5
File: loss.py Project: yyaqi/Dragon
def LayerSetup(self, bottom):
    loss = _ops.SparseSoftmaxCrossEntropy(bottom, **self.arguments)
    # Scale by the optional Caffe-style loss_weight.
    if self._loss_weight is not None:
        loss *= self._loss_weight
    return loss
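Both layer variants reduce to the same pattern: call the op with the arguments parsed from the layer definition, then scale by the optional Caffe-style loss_weight. A generic, self-contained sketch with stand-in names (the class and attributes here are illustrative, not Dragon's real ones):

class LossLayerSketch(object):
    def __init__(self, op, arguments, loss_weight=None):
        self._op = op                    # e.g. a SparseSoftmaxCrossEntropy op
        self.arguments = arguments       # kwargs parsed from the prototxt
        self._loss_weight = loss_weight  # optional "loss_weight" field

    def LayerSetup(self, bottom):
        loss = self._op(bottom, **self.arguments)
        if self._loss_weight is not None:
            loss = loss * self._loss_weight
        return loss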