Example #1
def l2_loss(t, name=None):
    """
    Computes half the L2 norm of a tensor without the sqrt:

      output = sum(t ** 2) / 2

      Args:
        t:  A Tensor. Typically 2-D, but may have any dimensions.
        name: Optional name for the operation.

      Returns:
        A Tensor. Has the same type as t. 0-D.

    """

    return (ops.Reduce(ops.Square(t), operation='SUM') * 0.5)
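For reference, the half-L2 formula above can be checked numerically with plain NumPy (a minimal sketch, independent of the framework's `ops` module; the array values are illustrative only):

import numpy as np

t = np.array([1.0, 2.0, 3.0])           # sample tensor
half_l2 = np.sum(np.square(t)) * 0.5    # output = sum(t ** 2) / 2
print(half_l2)                          # 7.0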
Example #2
    def LayerSetup(self, bottom):
        # Forward the stored layer arguments straight to the Reduce operator.
        return _ops.Reduce(bottom, **self.arguments)
Example #3
def l2_loss(t, name=None):
    return ops.Reduce(ops.Square(t), operation='SUM') * 0.5
Example #4
    def Setup(self, bottom):
        super(ReductionLayer, self).Setup(bottom)
        # Accept either a single blob or a list of blobs as the layer input.
        input = bottom[0] if isinstance(bottom, list) else bottom
        return ops.Reduce(input, **self._param)