Example no. 1
0
class TanhLayer(Layer):
    """ Layer with the `tanh` activation function.

    Parameters
    ----------
    function_coef : dict
        Default configuration for the `tanh` activation function. There is
        one available parameter, ``alpha`` (default is ``1``). The ``alpha``
        parameter controls the steepness of the function's shape.
    {layer_params}
    """
    # Passed as keyword arguments into the activation function (see Layer.__init__).
    function_coef = DictProperty(default={'alpha': 1})
    activation_function = tanh
Example no. 2
0
class TanhLayer(Layer):
    """ The layer with the `tanh` activation function.

    Parameters
    ----------
    function_coef : dict
        The default configuration for the `tanh` activation function.
        There is one available parameter, ``alpha`` (defaults to ``1``).
        Parameter ``alpha`` controls the steepness of the function's shape.
    {layer_params}
    """
    # Passed as keyword arguments into the activation function (see Layer.__init__).
    function_coef = DictProperty(default={'alpha': 1})
    activation_function = tanh
Example no. 3
0
class SoftmaxLayer(Layer):
    """ Layer with the softmax activation function.

    Parameters
    ----------
    function_coef : dict
        Default configuration for the softmax activation function. There is
        one available parameter, ``temp`` (default is ``1``). A smaller
        ``temp`` value will push the winner's probability closer to ``1``.
        A larger ``temp`` value will make all probabilities closer to
        equal values.
    {layer_params}
    """
    # Passed as keyword arguments into the activation function (see Layer.__init__).
    function_coef = DictProperty(default={'temp': 1})
    activation_function = softmax
Example no. 4
0
class SoftmaxLayer(Layer):
    """ Layer that applies the softmax activation function.

    Parameters
    ----------
    function_coef : dict
        Configuration for the softmax activation function. One parameter
        is available: ``temp`` (defaults to ``1``). A lower ``temp``
        sharpens the distribution, pushing the winning probability toward
        ``1``; a higher ``temp`` flattens it, making all probabilities
        approach equal values.
    {layer_params}
    """
    # `function_coef` is forwarded as keyword arguments to the activation
    # function by Layer.__init__.
    activation_function = softmax
    function_coef = DictProperty(default={'temp': 1})
Example no. 5
0
class Layer(BaseLayer):
    """ Base class for neural network layers.

    Parameters
    ----------
    function_coef : dict
        Default settings for the activation function. When provided, the
        entries are bound into ``activation_function`` as keyword arguments
        at construction time.
    {layer_params}
    """

    # Optional per-layer activation-function settings; ``None`` means the
    # activation function is used as-is.
    function_coef = DictProperty()

    def __init__(self, *args, **kwargs):
        super(Layer, self).__init__(*args, **kwargs)

        coefs = self.function_coef
        if coefs is not None:
            # Wrap the activation function so the configured coefficients
            # are applied on every call.
            make_partial = get_partial_for_func(self.activation_function)
            self.activation_function = make_partial(
                self.activation_function, **coefs
            )