def __init__(self):
    """Activation f(x) = x*sin(x), with derivative f'(x) = x*cos(x) + sin(x)."""
    # NOTE(review): the derivative is always evaluated right after the function,
    # with the same x — caching sin(x)/cos(x) could avoid recomputation.
    fn = lambda x: x * math.sin(x)
    dfn = lambda x: x * math.cos(x) + math.sin(x)
    ActivationFunction.__init__(self, fn, dfn)
def __init__(self):
    """Hyperbolic-tangent activation: f(x) = tanh(x), f'(x) = 1 - tanh(x)^2."""
    # NOTE(review): the derivative is always evaluated right after the function,
    # with the same x — caching tanh(x) could avoid recomputation.
    fn = lambda x: math.tanh(x)
    dfn = lambda x: 1 - math.pow(math.tanh(x), 2)
    ActivationFunction.__init__(self, fn, dfn)
def __init__(self):
    """Sigmoid activation: f(x) = sigmoid(x), f'(x) = sigmoid(x)*(1 - sigmoid(x))."""
    # NOTE(review): the derivative is always evaluated right after the function,
    # with the same x — reusing sigmoid(x) could avoid three extra evaluations.
    fn = lambda x: sigmoid(x)
    dfn = lambda x: sigmoid(x) * (1 - sigmoid(x))
    ActivationFunction.__init__(self, fn, dfn)
def __init__(self):
    """Identity (linear) activation: f(x) = x, f'(x) = 1."""
    fn = lambda x: x
    dfn = lambda x: 1.0
    ActivationFunction.__init__(self, fn, dfn)