Example #1
def softsign_activation(x, **kwargs):
    # See X. Glorot and Y. Bengio. Understanding the difficulty of training
    # deep feedforward neural networks. In Proceedings of the 13th
    # International Conference on Artificial Intelligence and Statistics, 2010.
    #   - http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf
    # This returns the element-wise value of:
    #   x / (1 + |x|)
    # which has slope 1 / (1 + |x|)^2.
    # Thus softsign also asymptotically approaches a slope of 0 for both large
    # positive and large negative values, but the drop-off is quadratic rather
    # than exponential.
    # Note: T_softsign is assumed to be Theano's element-wise softsign op,
    # imported elsewhere in the source file.
    return T_softsign(x)
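
For reference, here is a minimal NumPy sketch (not part of the original snippet) that checks the formula and the slope described in the comment above; the names softsign_np, the sample points, and the finite-difference step h are assumptions introduced purely for illustration:

import numpy as np

def softsign_np(x):
    # Element-wise x / (1 + |x|), the same formula as the Theano op above.
    return x / (1.0 + np.abs(x))

x = np.array([-10.0, -1.0, 0.0, 1.0, 10.0])
print(softsign_np(x))  # values approach -1 and +1 for large |x|

# Analytic slope 1 / (1 + |x|)^2 vs. a central finite difference.
h = 1e-6
analytic = 1.0 / (1.0 + np.abs(x)) ** 2
numeric = (softsign_np(x + h) - softsign_np(x - h)) / (2 * h)
print(np.allclose(analytic, numeric))  # True: the slope decays quadratically
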
Example #2
def softsign(self, x):
    return T_softsign(x)
Example #3
def softsign(x):
    return T_softsign(x)