Example #1
    def call(self, x):
        """Numerically stable Sigmoid activation function.

        Arguments:
            x: array-like
                Input tensor.

        Returns:
            The sigmoid activation: `1 / (1 + exp(-x))`.
        """
        return M.where(x >= 0, 1 / (1 + M.exp(-x)), M.exp(x) / (1 + M.exp(x)))
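The intent of the `M.where` split is numerical stability: the expression selected for `x >= 0` only needs `exp(-x) <= 1`, and the one selected for `x < 0` only needs `exp(x) <= 1`. A minimal sketch of the same idea in plain NumPy (assuming `M` is a NumPy-like backend; the piecewise indexing below avoids evaluating the unused branch, and `stable_sigmoid` is just an illustrative name):

import numpy as np

def stable_sigmoid(x):
    # For x >= 0, exp(-x) <= 1, so 1 / (1 + exp(-x)) cannot overflow.
    # For x < 0, exp(x) <= 1, so exp(x) / (1 + exp(x)) cannot overflow.
    x = np.asarray(x, dtype=float)
    out = np.empty_like(x)
    pos = x >= 0
    out[pos] = 1.0 / (1.0 + np.exp(-x[pos]))
    out[~pos] = np.exp(x[~pos]) / (1.0 + np.exp(x[~pos]))
    return out

print(stable_sigmoid([-1000.0, 0.0, 1000.0]))  # [0.  0.5 1. ] -- no overflow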
Example #2
    def call(self, x):
        """Hyperbolic tangent activation function.

        Arguments:
            x: array-like
                Input tensor.

        Returns:
            The hyperbolic tangent activation:
            `tanh(x) = (exp(x) - exp(-x)) / (exp(x) + exp(-x))`
        """
        return (M.exp(x) - M.exp(-x)) / (M.exp(x) + M.exp(-x))
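For reference, the same ratio checked against `np.tanh` in plain NumPy (assuming `M` is a NumPy-like backend); note that the explicit ratio can overflow for large `|x|`, which a built-in `tanh` avoids:

import numpy as np

x = np.linspace(-5.0, 5.0, 11)
ratio = (np.exp(x) - np.exp(-x)) / (np.exp(x) + np.exp(-x))
assert np.allclose(ratio, np.tanh(x))  # agrees to floating-point precision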
Example #3
    def call(self, y_true, y_pred):
        """Binary cross-entropy loss.

        If `from_logits` is True, `y_pred` is first mapped to probabilities
        with the sigmoid; probabilities are then clipped away from 0 and 1
        so the logarithms stay finite.
        """
        target = y_true
        output = y_pred
        if self.from_logits:
            output = 1 / (1 + M.exp(-y_pred))

        output = M.clip(output, M.epsilon(), 1.0 - M.epsilon())
        output = -target * M.log(output) - (1.0 - target) * M.log(1.0 - output)
        return M.mean(output, axis=-1)
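A quick way to see what this loss computes: map logits to probabilities with the sigmoid (when `from_logits` is true), clip away from 0 and 1, and average the per-element cross-entropy over the last axis. A small NumPy check (assuming `M` is a NumPy-like backend and `M.epsilon()` is a small constant such as 1e-7; the data below is made up):

import numpy as np

eps = 1e-7
y_true = np.array([[1.0, 0.0, 1.0]])
logits = np.array([[2.0, -1.0, 0.5]])

p = 1.0 / (1.0 + np.exp(-logits))               # from_logits: sigmoid first
p = np.clip(p, eps, 1.0 - eps)                  # keep log() finite
loss = -y_true * np.log(p) - (1.0 - y_true) * np.log(1.0 - p)
print(loss.mean(axis=-1))                       # per-sample mean, ~[0.305]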
Example #4
    def call(self, x):
        """Softmax activation function.

        Arguments:
            x: array-like
                Input tensor.

        Returns:
            Tensor, output of softmax transformation.
        """
        exps = M.exp(x - M.max(x, axis=-1, keepdims=True))
        return exps / M.sum(exps, axis=-1, keepdims=True)
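Subtracting the row-wise maximum before exponentiating shifts the largest logit to 0, so `exp` never sees a large positive argument; the result is unchanged because the shift cancels in the ratio. A small NumPy illustration (assuming `M` is a NumPy-like backend):

import numpy as np

x = np.array([[1000.0, 1001.0, 1002.0]])        # naive exp(x) would overflow
exps = np.exp(x - x.max(axis=-1, keepdims=True))
print(exps / exps.sum(axis=-1, keepdims=True))  # ~[[0.090 0.245 0.665]]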
Example #5
    def call(self, x):
        """Exponential linear unit.

        Arguments:
            x: array-like
                Input tensor.

        Returns:
            The exponential linear activation:
            `x` if `x >= 0` and
            `alpha * (exp(x)-1)` if `x < 0`.
        """
        return M.where(x >= 0, x, self.alpha * (M.exp(x) - 1))
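For intuition, ELU is the identity for non-negative inputs and saturates smoothly toward `-alpha` for negative ones, with the two pieces meeting at 0. A small NumPy sketch with an illustrative `alpha = 1.0` (assuming `M` is a NumPy-like backend):

import numpy as np

alpha = 1.0                                     # illustrative value
x = np.array([-2.0, -0.5, 0.0, 0.5, 2.0])
elu = np.where(x >= 0, x, alpha * (np.exp(x) - 1))
print(elu)  # ~[-0.865 -0.393  0.     0.5    2.   ]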
Example #6
    def gradient(self, x):
        """Gradient of the ELU activation function."""
        return M.where(x >= 0, 1, self.alpha * M.exp(x))
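The piecewise derivative can be sanity-checked against a central finite difference. A minimal sketch with an illustrative `alpha = 1.0` (assuming `M` is a NumPy-like backend; `elu` and `elu_grad` below are just stand-in names):

import numpy as np

alpha, h = 1.0, 1e-6                            # illustrative alpha and step size

def elu(x):
    return np.where(x >= 0, x, alpha * (np.exp(x) - 1))

def elu_grad(x):
    return np.where(x >= 0, 1.0, alpha * np.exp(x))

x = np.array([-2.0, -0.5, 0.5, 2.0])
numeric = (elu(x + h) - elu(x - h)) / (2 * h)   # central finite difference
assert np.allclose(numeric, elu_grad(x), atol=1e-5)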