Example #1
    def __call__(self, x, **kwargs):
        """Applies the lstm layer.

        Args:
            x (~chainer.Variable): Time-Batch of input vectors.

        Returns:
            ~chainer.Variable: Output of the lstm layer.

        """

        dropout_rate = kwargs.get('dropout', 0.)
        dropout_rate_hidden_hidden = kwargs.get('dropout_hidden_hidden', 0.)
        # Apply input dropout, then the input-to-hidden affine projection.
        x = dropout(x, dropout_rate)
        lstm_in = sequence_linear_function(x, self.W_x, self.b)
        if self.normalized:
            # Optionally batch-normalize the pre-activations.
            lstm_in = sequence_batch_normalization_function(
                lstm_in, self.gamma, self.beta)
        # Reuse the recurrent state from the previous call only when stateful.
        if self.stateful:
            c_prev = self.c_prev
            h_prev = self.h_prev
        else:
            c_prev = None
            h_prev = None
        # Recurrent step; the hidden-to-hidden dropout rate is passed through
        # to sequence_lstm_function.
        lstm_out, self.h_prev, self.c_prev = \
            sequence_lstm_function(lstm_in, self.W_h, c_prev, h_prev,
                                   self.reverse, dropout_rate_hidden_hidden)
        return lstm_out
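A minimal usage sketch, assuming a hypothetical SequenceLSTMLayer class that exposes the __call__ above; the class name, the constructor arguments, and the (time, batch, features) input layout are assumptions, while the dropout and dropout_hidden_hidden keywords come from the snippet itself:

    import numpy as np
    import chainer

    # Hypothetical construction; the class name and constructor
    # arguments are assumptions, not part of the snippet above.
    layer = SequenceLSTMLayer(in_size=32, out_size=64, stateful=True)

    # Time-batch input: (sequence_length, batch_size, in_size).
    x = chainer.Variable(np.random.randn(10, 4, 32).astype(np.float32))

    # Keyword names are the ones read via kwargs.get(...) in __call__.
    y = layer(x, dropout=0.2, dropout_hidden_hidden=0.1)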
Example #3
    def __call__(self, x, **kwargs):
        """Applies the linear layer.

        Args:
            x (~chainer.Variable): Time-batch of input vectors.

        Returns:
            ~chainer.Variable: Output of the linear layer.

        """

        dropout_rate = kwargs.get('dropout', 0.)
        # Apply input dropout, then the affine projection over the sequence.
        x = dropout(x, dropout_rate)
        x = sequence_linear_function(x, self.W, self.b)
        if self.normalized:
            # Optionally batch-normalize the output.
            x = sequence_batch_normalization_function(x, self.gamma, self.beta)
        return x
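A minimal usage sketch, assuming a hypothetical SequenceLinearLayer class wrapping the __call__ above; the class name, constructor arguments, and (time, batch, features) input layout are assumptions, while the dropout keyword comes from the snippet itself:

    import numpy as np
    import chainer

    # Hypothetical construction; class name and constructor arguments
    # are assumptions, not part of the snippet above.
    layer = SequenceLinearLayer(in_size=64, out_size=128, normalized=True)

    # Time-batch input: (sequence_length, batch_size, in_size).
    x = chainer.Variable(np.random.randn(10, 4, 64).astype(np.float32))

    # 'dropout' is the only keyword this __call__ reads.
    y = layer(x, dropout=0.2)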