Example #1
    def __init__(self,
                 hidden_dim,
                 activation='tanh',
                 inner_init='orthogonal',
                 parameters=None,
                 return_sequences=True):
        self.return_sequences = return_sequences
        self.hidden_dim = hidden_dim
        self.inner_init = get_initializer(inner_init)
        self.activation = get_activation(activation)
        # Cache elementwise derivatives of the activations for the backward pass
        self.activation_d = elementwise_grad(self.activation)
        self.sigmoid_d = elementwise_grad(sigmoid)

        if parameters is None:
            self._params = Parameters()
        else:
            self._params = parameters

        # Placeholders populated during the forward pass and reused in backprop
        self.last_input = None
        self.states = None
        self.outputs = None
        self.gates = None
        self.hprev = None
        self.input_dim = None
        self.W = None
        self.U = None
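
The two elementwise_grad calls above cache derivative functions of the activations so the backward pass can reuse them. A minimal sketch of what they compute, assuming elementwise_grad comes from the autograd package and sigmoid is the usual logistic function (both assumptions; the example omits its imports):

# Sketch only: assumes the autograd package, not the example's own imports.
import autograd.numpy as np
from autograd import elementwise_grad

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

tanh_d = elementwise_grad(np.tanh)     # computes 1 - tanh(x)**2, elementwise
sigmoid_d = elementwise_grad(sigmoid)  # computes sigmoid(x) * (1 - sigmoid(x)), elementwise

x = np.array([-1.0, 0.0, 1.0])
print(tanh_d(x))     # approx [0.42, 1.00, 0.42]
print(sigmoid_d(x))  # approx [0.197, 0.25, 0.197]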
Example #2
    def __init__(self, init='glorot_uniform', scale=0.5, bias=1.0, regularizers=None, constraints=None):
        """A container for layer's parameters.

        Parameters
        ----------
        init : str, default 'glorot_uniform'
            The name of the weight initialization function.
        scale : float, default 0.5
        bias : float, default 1.0
            Initial values for bias.
        regularizers : dict
            Weight regularizers, e.g. {'W': L2()}.
        constraints : dict
            Weight constraints, e.g. {'b': MaxNorm()}.
        """
        if constraints is None:
            self.constraints = {}
        else:
            self.constraints = constraints

        if regularizers is None:
            self.regularizers = {}
        else:
            self.regularizers = regularizers

        self.initial_bias = bias
        self.scale = scale
        self.init = get_initializer(init)

        self._params = {}
        self._grads = {}
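
Per the docstring, regularizers and constraints map parameter names such as 'W' and 'b' to regularizer and constraint objects. A hedged usage sketch; the import paths for Parameters, L2, and MaxNorm below are placeholders, since the example does not show where they live:

# Sketch only: the import paths are hypothetical, not taken from the example.
from parameters import Parameters
from regularizers import L2
from constraints import MaxNorm

params = Parameters(
    init='glorot_uniform',         # weight initializer, looked up by name
    scale=0.5,
    bias=1.0,                      # initial value for the bias entries
    regularizers={'W': L2()},      # penalize the 'W' weights
    constraints={'b': MaxNorm()},  # constrain the 'b' weights
)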
Example #3
    def __init__(self, init='glorot_uniform', scale=0.5, bias=1.0, regularizers=None, constraints=None):
        """A container for layer's parameters.

        Parameters
        ----------
        init : str, default 'glorot_uniform'
            The name of the weight initialization function.
        scale : float, default 0.5
        bias : float, default 1.0
            Initial values for bias.
        regularizers : dict
            Weight regularizers, e.g. {'W': L2()}.
        constraints : dict
            Weight constraints, e.g. {'b': MaxNorm()}.
        """
        if constraints is None:
            self.constraints = {}
        else:
            self.constraints = constraints

        if regularizers is None:
            self.regularizers = {}
        else:
            self.regularizers = regularizers

        self.initial_bias = bias
        self.scale = scale
        self.init = get_initializer(init)

        self._params = {}
        self._grads = {}
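
For orientation, generic implementations of the regularizer and constraint objects the docstring names (L2, MaxNorm). These are illustrative only and not taken from this codebase:

import numpy as np

class L2:
    """Generic L2 penalty: contributes C * w to the gradient of weights w."""
    def __init__(self, C=0.01):
        self.C = C

    def __call__(self, weights):
        return self.C * weights

class MaxNorm:
    """Generic max-norm constraint: rescales weights whose L2 norm exceeds m."""
    def __init__(self, m=2.0, axis=0):
        self.m = m
        self.axis = axis

    def clip(self, weights):
        norms = np.sqrt(np.sum(weights ** 2, axis=self.axis, keepdims=True))
        desired = np.clip(norms, 0.0, self.m)
        return weights * (desired / (norms + 1e-8))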
Example #4
    def __init__(self, hidden_dim, activation='tanh', inner_init='orthogonal', parameters=None, return_sequences=True):
        self.return_sequences = return_sequences
        self.hidden_dim = hidden_dim
        self.inner_init = get_initializer(inner_init)
        self.activation = get_activation(activation)
        # Cache the elementwise derivative of the activation for the backward pass
        self.activation_d = elementwise_grad(self.activation)
        if parameters is None:
            self._params = Parameters()
        else:
            self._params = parameters
        # Placeholders populated during the forward pass
        self.last_input = None
        self.states = None
        self.hprev = None
        self.input_dim = None
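
The cached attributes (last_input, states, hprev, return_sequences) are the usual bookkeeping of a recurrent forward pass. For orientation only, a generic vanilla-RNN forward step is sketched below; it is not this example's implementation, and the W/U/b weight names are assumptions borrowed from Example #1:

import numpy as np

def rnn_forward(X, W, U, b, activation=np.tanh, return_sequences=True):
    """Generic vanilla RNN: h_t = activation(x_t W + h_{t-1} U + b).

    X: (batch, timesteps, input_dim); W: (input_dim, hidden_dim);
    U: (hidden_dim, hidden_dim); b: (hidden_dim,).
    """
    batch, timesteps, _ = X.shape
    hidden_dim = W.shape[1]
    # Extra slot at index -1 holds the initial hidden state h_{-1} = 0.
    states = np.zeros((batch, timesteps + 1, hidden_dim))
    for t in range(timesteps):
        states[:, t, :] = activation(X[:, t, :] @ W + states[:, t - 1, :] @ U + b)
    # Return every hidden state or only the last one, mirroring return_sequences.
    return states[:, :timesteps, :] if return_sequences else states[:, timesteps - 1, :]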