Example 1
    def __init__(self, input_dim, output_dim, init='glorot_uniform', activation='tanh', name='Dense',
                 learn_bias=True, negative_bias=False):

        super(Dense, self).__init__()
        self.init = initializations.get(init)
        self.activation = activations.get(activation)
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.linear = (activation == 'linear')

        # self.input = T.matrix()
        # Weight matrix, drawn from the chosen initialization scheme
        self.W = self.init((self.input_dim, self.output_dim))
        # Bias starts at zeros by default, or at ones when negative_bias is set
        if not negative_bias:
            self.b = shared_zeros((self.output_dim,))
        else:
            self.b = shared_ones((self.output_dim,))

        self.learn_bias = learn_bias
        if self.learn_bias:
            self.params = [self.W, self.b]
        else:
            self.params = [self.W]

        if name is not None:
            self.set_name(name)
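
Only the constructor is shown; the forward pass lives elsewhere in the class. As a minimal sketch, assuming the usual dense computation output = activation(x @ W + b) and made-up shapes, the parameters allocated above would be used like this:

    import numpy as np

    # Hypothetical forward pass for the Dense layer above (not part of the snippet).
    x = np.random.randn(8, 128)           # batch of 8 examples, input_dim = 128
    W = np.random.randn(128, 64) * 0.1    # stands in for self.W, output_dim = 64
    b = np.zeros(64)                      # stands in for self.b (shared_zeros)
    out = np.tanh(x @ W + b)              # default activation='tanh'; shape (8, 64)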
Example 2
    def __init__(self,
                 input_dim1,
                 input_dim2,
                 output_dim,
                 init='glorot_uniform',
                 activation='tanh',
                 name='Dense',
                 learn_bias=True):

        super(Dense2, self).__init__()
        self.init = initializations.get(init)
        self.activation = activations.get(activation)
        self.input_dim1 = input_dim1
        self.input_dim2 = input_dim2
        self.output_dim = output_dim
        self.linear = (activation == 'linear')

        # self.input = T.matrix()

        # One weight matrix per input stream, plus a shared bias
        self.W1 = self.init((self.input_dim1, self.output_dim))
        self.W2 = self.init((self.input_dim2, self.output_dim))
        self.b = shared_zeros((self.output_dim,))

        self.learn_bias = learn_bias
        if self.learn_bias:
            self.params = [self.W1, self.W2, self.b]
        else:
            self.params = [self.W1, self.W2]

        if name is not None:
            self.set_name(name)
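
Dense2 projects two inputs with separate weight matrices into the same output space. The forward pass is not in the snippet, but a plausible sketch, assuming the two projections are summed before the nonlinearity:

    import numpy as np

    # Hypothetical two-input forward pass (an assumption; only __init__ is shown).
    x1 = np.random.randn(8, 32)           # first input, input_dim1 = 32
    x2 = np.random.randn(8, 16)           # second input, input_dim2 = 16
    W1 = np.random.randn(32, 64) * 0.1    # stands in for self.W1
    W2 = np.random.randn(16, 64) * 0.1    # stands in for self.W2
    b = np.zeros(64)                      # stands in for self.b
    out = np.tanh(x1 @ W1 + x2 @ W2 + b)  # shape (8, 64)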
Example 3
    def __init__(self, input_dim, output_dim, init='uniform', name=None):

        super(Embedding, self).__init__()
        self.init = initializations.get(init)
        self.input_dim = input_dim
        self.output_dim = output_dim

        # The embedding matrix is the layer's only trainable parameter
        self.W = self.init((self.input_dim, self.output_dim))

        self.params = [self.W]

        if name is not None:
            self.set_name(name)
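
An embedding layer's forward pass is typically a row lookup into W, one row per integer token id. A sketch under that assumption, with a small uniform init standing in for the 'uniform' registry entry:

    import numpy as np

    # Hypothetical lookup for the Embedding layer above (forward pass not shown).
    vocab_size, embed_dim = 1000, 32
    W = np.random.uniform(-0.05, 0.05, (vocab_size, embed_dim))  # stands in for self.W
    token_ids = np.array([4, 17, 17, 2])   # a short sequence of integer indices
    vectors = W[token_ids]                 # shape (4, 32): one row of W per token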
Example 4
    def __init__(self, input_dim, output_dim, init=None, activation='tanh', name='Bias'):

        super(Constant, self).__init__()
        assert input_dim == output_dim, 'Bias layer requires the same number of input and output nodes.'

        self.init = initializations.get(init)
        self.activation = activations.get(activation)
        self.input_dim = input_dim
        self.output_dim = output_dim

        self.b = shared_zeros(self.output_dim)
        self.params = [self.b]

        if name is not None:
            self.set_name(name)
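
Since the layer owns only a bias vector, its forward pass presumably adds b elementwise and applies the activation. A sketch under that assumption:

    import numpy as np

    # Hypothetical forward pass for the bias layer above (not part of the snippet).
    dim = 64                      # input_dim == output_dim, as the assert enforces
    x = np.random.randn(8, dim)
    b = np.zeros(dim)             # stands in for self.b (shared_zeros)
    out = np.tanh(x + b)          # bias shift followed by the default tanh activation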
Example 5
    def __init__(self, input_dim, input_wdth, init='glorot_uniform',
                 activation='tanh', name='Bias', has_input=True):
        super(MemoryLinear, self).__init__()

        self.init       = initializations.get(init)
        self.activation = activations.get(activation)
        self.input_dim  = input_dim
        self.input_wdth = input_wdth

        # Parameter matrix spanning the full memory grid (input_dim x input_wdth)
        self.b = self.init((self.input_dim, self.input_wdth))
        self.params = [self.b]

        if has_input:
            # Second matrix of the same shape, allocated only when the layer receives input
            self.P = self.init((self.input_dim, self.input_wdth))
            self.params += [self.P]

        if name is not None:
            self.set_name(name)
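
MemoryLinear allocates one or two (input_dim, input_wdth) matrices. A shape-level NumPy sketch, with a standard Glorot/Xavier uniform init standing in for initializations.get('glorot_uniform') (an assumption; the real registry is not shown):

    import numpy as np

    def glorot_uniform(shape):
        # Standard Glorot/Xavier uniform scaling (assumed to match the registry).
        limit = np.sqrt(6.0 / (shape[0] + shape[1]))
        return np.random.uniform(-limit, limit, shape)

    input_dim, input_wdth = 20, 50
    b = glorot_uniform((input_dim, input_wdth))  # always created; stands in for self.b
    P = glorot_uniform((input_dim, input_wdth))  # created only when has_input=True
    params = [b, P]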