# --- Example no. 1 (score: 0) ---
import numpy
import theano
import theano.tensor as T

from seq_to_seq.layers_core import Layer
from seq_to_seq import activations

# Resolve the softmax activation once at module load so the layer classes
# below can share a single callable. `activations.get` presumably maps an
# activation name to its function — TODO confirm against seq_to_seq.activations.
softmax = activations.get('softmax')


class Softmax(Layer):
    """
    Softmax class.

    :param n_in: int
        The size of the input to the layer (i.e., the number of rows in the weight matrix).

    :param n_out: int
        The size of layer's output (i.e., the number of columns of the weight matrix and the bias
            vector). This is the size of the vector that will represent each of the inputs.

    :param previous_layer: Layer object
        The previous layer in the computational path.

    :param layer_number: int
        The layer position in the computational path.

    :param seed: int
        The seed to feed the random number generator.

    :param auto_setup: boolean
# --- Example no. 2 (score: 0) ---
import numpy
import theano
import theano.tensor as T

from seq_to_seq.layers_core import Layer
from seq_to_seq import activations

# Resolve activation callables once at module load; shared by the layer
# classes below. `activations.get` presumably maps a name to a function —
# TODO confirm against seq_to_seq.activations.
softmax = activations.get('softmax')
relu = activations.get('relu')


class Softmax(Layer):
    """
    Softmax class.

    :param n_in: int
        The size of the input to the layer (i.e., the number of rows in the weight matrix).

    :param n_out: int
        The size of layer's output (i.e., the number of columns of the weight matrix and the bias
            vector). This is the size of the vector that will represent each of the inputs.

    :param previous_layer: Layer object
        The previous layer in the computational path.

    :param layer_number: int
        The layer position in the computational path.

    :param seed: int
        The seed to feed the random number generator.
# --- Example no. 3 (score: 0) ---
import numpy
import theano
import theano.tensor as T

from seq_to_seq import activations
from seq_to_seq.layers_core import Layer

# Resolve the gate/candidate activations once at module load; recurrent
# layers below reuse these callables. `activations.get` presumably maps a
# name to a function — TODO confirm against seq_to_seq.activations.
sigmoid = activations.get('sigmoid')
tanh = activations.get('tanh')


class RecurrentLayer(Layer):
    """
    Base class for recurrent layers.

    :param n_in: int
        The size of the input to the layer (i.e., the number of rows in the weight matrix).

    :param dim_proj: int
        The size of layer's output (i.e., the number of columns of the weight matrix and the bias
            vector). This is the size of the vector that will represent each of the inputs.

    :param previous_layer: Layer object
        The previous layer in the computational path.

    :param return_sequences: boolean
        Flag indicating whether or not to the layer should output the previous hidden states.

    :param layer_number: int
        The layer position in the computational path.
# --- Example no. 4 (score: 0) ---
import numpy
import theano
import theano.tensor as T

from seq_to_seq import activations
from seq_to_seq.layers_core import Layer

# Resolve activation callables once at module load so recurrent layers can
# share them. `activations.get` presumably maps an activation name to its
# function — TODO confirm against seq_to_seq.activations.
sigmoid = activations.get('sigmoid')
tanh = activations.get('tanh')
relu = activations.get('relu')


class RecurrentLayer(Layer):
    """
    Base class for recurrent layers.

    :param n_in: int
        The size of the input to the layer (i.e., the number of rows in the weight matrix).

    :param n_out: int
        The size of layer's output (i.e., the number of columns of the weight matrix and the bias
            vector). This is the size of the vector that will represent each of the inputs.

    :param previous_layer: Layer object
        The previous layer in the computational path.

    :param return_sequences: boolean
        Flag indicating whether or not to the layer should output the previous hidden states.

    :param layer_number: int
        The layer position in the computational path.