Example No. 1
def leaky_relu(features, alpha=0.2, name=None, **kwargs):
    r"""Apply the leaky rectified linear unit.

    The **LeakyReLU** function is defined as:

    .. math::
        \text{LeakyReLU}(x) =
            \begin{cases}
                x, & \text{ if } x \geq 0 \\
                \alpha * x, & \text{ otherwise }
            \end{cases}

    Parameters
    ----------
    features : dragon.Tensor
        The input tensor.
    alpha : number, optional, default=0.2
        The value of :math:`\alpha`.
    name : str, optional
        The operation name.

    Returns
    -------
    dragon.Tensor
        The output tensor.

    """
    return activation_ops.leaky_relu(features,
                                     alpha=alpha,
                                     name=name,
                                     **kwargs)
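
The wrapper above simply forwards to `activation_ops.leaky_relu`. For readers who want to see the piecewise formula itself evaluated, here is a minimal NumPy sketch; it is not Dragon's kernel, and `leaky_relu_reference` is a made-up name used only for illustration:

```python
import numpy as np

def leaky_relu_reference(x, alpha=0.2):
    """Reference implementation of the piecewise definition above."""
    x = np.asarray(x, dtype=np.float32)
    return np.where(x >= 0, x, alpha * x)

print(leaky_relu_reference([-2.0, 0.0, 3.0]))  # [-0.4  0.  3.]
```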
Example No. 2
def leaky_relu(x, alpha=0.2, name="leaky_relu", **kwargs):
    r"""Apply the leaky rectified linear unit.

    The **LeakyReLU** function is defined as:

    .. math::
        \text{LeakyReLU}(x) =
            \begin{cases}
                x, & \text{ if } x \geq 0 \\
                \alpha * x, & \text{ otherwise }
            \end{cases}

    Examples:

    ```python
    x = tl.layers.Input([10, 200])
    y = tl.layers.Dense(
        n_units=100,
        act=lambda x: tl.act.lrelu(x, 0.2),
        name='dense',
    )(x)
    ```

    Parameters
    ----------
    x : dragon.Tensor
        The input tensor.
    alpha : float, optional, default=0.2
        The value of :math:`\alpha`.
    name : str, optional
        The operator name.

    Returns
    -------
    dragon.Tensor
        The output tensor.

    """
    if not (0 <= alpha <= 1):
        raise ValueError("`alpha` value must be in [0, 1]")
    return activation_ops.leaky_relu(x, alpha=alpha, name=name, **kwargs)
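
The only difference from Example No. 1 is the range check on `alpha`. A standalone sketch of that guard, using a hypothetical `check_alpha` helper, shows how an out-of-range value is rejected:

```python
def check_alpha(alpha):
    """Mirror of the guard in the wrapper above."""
    if not (0 <= alpha <= 1):
        raise ValueError("`alpha` value must be in [0, 1]")
    return alpha

check_alpha(0.2)      # accepted
try:
    check_alpha(1.5)  # rejected: outside [0, 1]
except ValueError as err:
    print(err)
```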
Example No. 3
def relu(x, alpha=0, max_value=None, **kwargs):
    r"""Apply the rectified linear unit to input.
    `[Nair & Hinton, 2010] <http://www.csri.utoronto.ca/~hinton/absps/reluICML.pdf>`_.

    The **ReLU** function is defined as:

    .. math::
        \text{ReLU}(x) =
            \begin{cases}
                \min(x, v_{max}), & \text{ if } x \geq 0 \\
                \alpha * x, & \text{ otherwise }
            \end{cases}

    Examples:

    ```python
    x = tf.constant([-1, 0, 1], 'float32')
    print(tf.keras.activations.relu(x, inplace=False))
    ```

    Parameters
    ----------
    x : dragon.Tensor
        The input tensor.
    alpha : number, optional, default=0
        The value of :math:`\alpha`.
    max_value : number, optional
        The value to :math:`v_{max}`.

    Returns
    -------
    dragon.Tensor
        The output tensor.

    """
    if max_value is not None:
        if alpha != 0:
            raise ValueError('Set either <alpha> or <max_value>.')
        if max_value != 6:
            raise ValueError('<max_value> can only be set to 6.')
        return activation_ops.relu6(x, **kwargs)
    return activation_ops.leaky_relu(x, alpha=alpha, **kwargs)
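
This wrapper dispatches on its arguments: with `max_value` unset it falls back to `activation_ops.leaky_relu` (plain ReLU when `alpha=0`), and with `max_value=6` and `alpha=0` it calls `activation_ops.relu6`; any other combination raises. A NumPy sketch of the same dispatch, based only on the formula above and using a hypothetical `relu_reference` name:

```python
import numpy as np

def relu_reference(x, alpha=0.0, max_value=None):
    """Same dispatch as above, evaluated with NumPy."""
    x = np.asarray(x, dtype=np.float32)
    if max_value is not None:
        if alpha != 0:
            raise ValueError('Set either <alpha> or <max_value>.')
        if max_value != 6:
            raise ValueError('<max_value> can only be set to 6.')
        return np.minimum(np.maximum(x, 0.0), 6.0)  # ReLU6: min(max(x, 0), 6)
    return np.where(x >= 0, x, alpha * x)           # ReLU / LeakyReLU

print(relu_reference([-1.0, 3.0, 8.0], max_value=6))  # [0. 3. 6.]
```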
Example No. 4
def __call__(self, bottom):
    if self.negative_slope > 0:
        return activation_ops.leaky_relu(bottom, self.negative_slope)
    return activation_ops.relu(bottom)
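
This Caffe-style layer picks the leaky variant only when `negative_slope` is positive, otherwise it applies the plain ReLU. A hypothetical NumPy mirror of that dispatch, for illustration only:

```python
import numpy as np

class ReLULayerSketch(object):
    """Hypothetical NumPy mirror of the dispatch on ``negative_slope``."""

    def __init__(self, negative_slope=0.0):
        self.negative_slope = negative_slope

    def __call__(self, bottom):
        bottom = np.asarray(bottom, dtype=np.float32)
        if self.negative_slope > 0:
            return np.where(bottom >= 0, bottom, self.negative_slope * bottom)
        return np.maximum(bottom, 0.0)

print(ReLULayerSketch(0.1)([-2.0, 4.0]))  # [-0.2  4.]
```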
Example No. 5
def call(self, inputs):
    return activation_ops.leaky_relu(inputs,
                                     alpha=self.alpha,
                                     inplace=self.inplace)
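
Here `alpha` and `inplace` come from the layer's attributes. Assuming `inplace=True` means the op may overwrite its input buffer (an assumption about the flag, not confirmed here), a NumPy sketch of the two paths with a made-up `leaky_relu_inplace_sketch` helper:

```python
import numpy as np

def leaky_relu_inplace_sketch(x, alpha=0.2, inplace=False):
    """Illustrative only: the in-place path rewrites negatives in the given buffer."""
    if inplace:
        # Scale only the negative entries; the other entries keep their values in `out`.
        np.multiply(x, alpha, out=x, where=x < 0)
        return x
    return np.where(x >= 0, x, alpha * x)

buf = np.array([-1.0, 2.0], dtype=np.float32)
out = leaky_relu_inplace_sketch(buf, inplace=True)
print(out, out is buf)  # [-0.2  2.] True
```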