Example #1
 def forward(self, inputs, **kwargs):
     # ``use_stats=0`` normalizes with the current batch statistics
     # (training); ``use_stats=1`` uses the moving statistics (inference).
     outputs = normalization_ops.batch_norm(
         [inputs, self.gamma, self.beta, self.moving_mean, self.moving_var],
         axis=self.axis,
         momentum=self.decay,
         epsilon=self.epsilon,
         use_stats=0 if self.training else 1)
     if self.act:
         outputs = self.act(outputs)
     return outputs
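
For reference, all of these snippets dispatch to the same transform: the Ioffe & Szegedy normalization quoted in the docstrings below. A minimal NumPy sketch of the inference-mode path (moving statistics, i.e. ``use_stats=1``); the function name and argument layout are illustrative, not Dragon's API:

import numpy as np

def batch_norm_inference(x, gamma, beta, moving_mean, moving_var,
                         axis=-1, epsilon=1e-5):
    """y = (x - E[x]) / sqrt(Var[x] + eps) * gamma + beta."""
    # Reshape the per-channel vectors so they broadcast along ``axis``.
    shape = [1] * x.ndim
    shape[axis] = x.shape[axis]
    scale = gamma.reshape(shape) / np.sqrt(moving_var.reshape(shape) + epsilon)
    return (x - moving_mean.reshape(shape)) * scale + beta.reshape(shape)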
Example #2
def batch_normalization(
    x,
    moving_mean,
    moving_variance,
    offset,
    scale,
    axis=-1,
    momentum=0.9,
    variance_epsilon=1e-5,
    trainable=False,
    name=None,
):
    r"""Apply the batch normalization.
    `[Ioffe & Szegedy, 2015] <https://arxiv.org/abs/1502.03167>`_.

    The normalization is defined as:

    .. math::
        y = \frac{x - \mathrm{E}[x]}{\sqrt{\mathrm{Var}[x] + \epsilon}} * \gamma + \beta

    The moving average of stats is calculated as:

    .. math:: x_{\text{running}} = \text{momentum} * x_{\text{running}} +
                                   (1 - \text{momentum}) * x_{\text{batch}}

    Parameters
    ----------
    x : dragon.Tensor
        The input tensor.
    moving_mean : dragon.Tensor
        The moving mean.
    moving_variance : dragon.Tensor
        The moving variance.
    offset : dragon.Tensor
        The :math:`\beta` tensor.
    scale : dragon.Tensor
        The :math:`\gamma` tensor.
    axis : int, optional, default=-1
        The channel axis.
    momentum : Union[float, dragon.Tensor], optional, default=0.9
        The value of :math:`\text{momentum}`.
    variance_epsilon : float, optional, default=1e-5
        The value of :math:`\epsilon`.
    trainable : bool, optional, default=False
        Whether to normalize with the batch statistics (``True``)
        or the moving statistics (``False``).
    name : str, optional
        The operation name.

    Returns
    -------
    dragon.Tensor
        The output tensor.

    """
    return normalization_ops.batch_norm(
        [x, scale, offset, moving_mean, moving_variance],
        axis=axis,
        momentum=momentum,
        epsilon=variance_epsilon,
        use_stats=not trainable,
        name=name,
    )
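
The momentum update quoted in the docstring is easy to verify in plain NumPy. A short sketch under hypothetical names, with batch statistics taken per channel:

import numpy as np

def update_moving_stats(moving_mean, moving_var, batch_mean, batch_var,
                        momentum=0.9):
    # x_running = momentum * x_running + (1 - momentum) * x_batch
    new_mean = momentum * moving_mean + (1 - momentum) * batch_mean
    new_var = momentum * moving_var + (1 - momentum) * batch_var
    return new_mean, new_var

x = np.random.randn(8, 16)  # (batch, channels), channel axis = -1
batch_mean, batch_var = x.mean(axis=0), x.var(axis=0)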
Example #3
def fused_batch_norm(
    x,
    scale,
    offset,
    mean,
    variance,
    epsilon=0.001,
    data_format='NHWC',
    is_training=True,
    name=None,
    exponential_avg_factor=1.0,
):
    r"""Apply the batch normalization.
    `[Ioffe & Szegedy, 2015] <https://arxiv.org/abs/1502.03167>`_.

    The normalization is defined as:

    .. math:: y = \frac{x - \mathrm{E}[x]}
                       {\sqrt{\mathrm{Var}[x] + \epsilon}}
                  * \gamma + \beta

    The moving average of stats is calculated as:

    .. math:: x_{\text{moving}} = \text{momentum} * x_{\text{moving}} +
                                  (1 - \text{momentum}) * x_{\text{batch}}

    Parameters
    ----------
    x : dragon.Tensor
        The input tensor.
    scale : dragon.Tensor
        The :math:`\gamma` tensor.
    offset : dragon.Tensor
        The :math:`\beta` tensor.
    mean : dragon.Tensor
        The running mean tensor.
    variance : dragon.Tensor
        The running variance tensor.
    epsilon : float, optional, default=1e-3
        The value of :math:`\epsilon`.
    data_format : str, optional, default='NHWC'
        ``'NCHW'`` or ``'NHWC'``.
    is_training : bool, optional, default=True
        Whether to normalize with the batch statistics (``True``)
        or the moving statistics (``False``).
    name : str, optional
        The operation name.
    exponential_avg_factor : float, optional, default=1.0
        The value of :math:`1 - \text{momentum}`.

    Returns
    -------
    dragon.Tensor
        The output tensor.

    """
    return normalization_ops.batch_norm(
        [x, scale, offset, mean, variance],
        axis=1 if data_format.startswith('NC') else -1,
        momentum=1 - exponential_avg_factor,
        epsilon=epsilon,
        use_stats=not is_training,
        name=name,
    )
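
This adapter bridges two parameterizations of the same running average: TensorFlow's fused op scales the correction by ``exponential_avg_factor``, while Dragon keeps a ``momentum`` on the old value, hence ``momentum = 1 - exponential_avg_factor``. A quick standalone check that the two updates agree (all names hypothetical):

import numpy as np

moving = np.array([1.0, 2.0])
batch = np.array([3.0, 4.0])

exponential_avg_factor = 0.1           # TensorFlow-style factor
momentum = 1 - exponential_avg_factor  # Dragon-style momentum

# TensorFlow-style: moving += factor * (batch - moving)
tf_style = moving + exponential_avg_factor * (batch - moving)
# Dragon-style: moving = momentum * moving + (1 - momentum) * batch
dragon_style = momentum * moving + (1 - momentum) * batch

assert np.allclose(tf_style, dragon_style)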
Example #4
 def __call__(self, bottom):
     # Matching the input order used above, the first two blobs
     # supply the moving mean and the moving variance.
     inputs = [bottom, self._weight, self._bias] + \
              [blob['data'] for blob in self._blobs[:2]]
     return normalization_ops.batch_norm(inputs, **self.arguments)
Example #5
 def call(self, inputs, training=None):
     return normalization_ops.batch_norm(
         [inputs, self.gamma, self.beta,
          self.moving_mean, self.moving_variance],
         axis=self.axis, momentum=self.momentum, epsilon=self.epsilon,
         use_stats=not training)
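
One subtlety in this variant: ``use_stats=not training`` coerces the Keras-style default ``training=None`` to inference mode, because ``not None`` evaluates to ``True``. A tiny standalone check:

for training in (True, False, None):
    print(training, '-> use_stats =', not training)
# True  -> use_stats = False  (batch statistics)
# False -> use_stats = True   (moving statistics)
# None  -> use_stats = True   (moving statistics, i.e. inference)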
Example #6
 def __call__(self, bottom):
     # Lazily create the parameter blobs on the first call.
     if len(self.blobs) == 0:
         self.build(bottom)
     inputs = [bottom, self.weight, self.bias] + \
              [blob['data'] for blob in self.blobs[:2]]
     return normalization_ops.batch_norm(inputs, **self.call_args)