Example no. 1
0
    def __init__(self,
                 input_dim,
                 hidden_dim,
                 init='glorot_uniform',
                 activation='linear',
                 weights=None):
        """Set up a dense Theano layer mapping ``input_dim`` -> ``hidden_dim``.

        Parameters
        ----------
        input_dim : int
            Size of the input; also reported as ``output_dim`` (autoencoder-
            style layer whose reconstruction matches the input size).
        hidden_dim : int
            Number of hidden units.
        init : str
            Name of the weight-initialization scheme, resolved via
            ``initializations.get``.
        activation : str
            Name of the activation function, resolved via ``activations.get``.
        weights : optional
            If given, initial parameter values forwarded to ``set_weights``.
        """
        # NOTE(review): the original body also built a Glorot-limit weight
        # matrix (np.random.uniform -> theano.shared) and a broadcastable
        # hidden bias `hbias`; all were dead locals, immediately shadowed by
        # self.W / self.b below, so that code was removed.
        self.init = initializations.get(init)
        self.activation = activations.get(activation)
        self.input_dim = input_dim

        self.hidden_dim = hidden_dim
        # Output dimension mirrors the input dimension.
        self.output_dim = input_dim

        self.input = T.matrix()

        # maybe need to replace the initialization function
        self.W = self.init((self.input_dim, self.hidden_dim))
        # Explicit 1-tuple: (x) is just x, (x,) is the intended shape tuple.
        self.b = shared_zeros((self.hidden_dim,))
        #self.b_tilde = shared_zeros((self.input_dim))

        self.params = [self.W, self.b]

        if weights is not None:
            self.set_weights(weights)
Example no. 2
0
    def __init__(self, input_dim, hidden_dim, init='glorot_uniform',
                 activation='linear', weights=None):
        """Initialize a dense Theano layer with ``hidden_dim`` units.

        ``output_dim`` is set equal to ``input_dim`` (autoencoder-style
        layer). The initializer and activation are looked up by name via
        ``initializations.get`` / ``activations.get``; ``weights``, when
        provided, is forwarded to ``set_weights``.
        """
        # NOTE(review): removed dead code from the original — an unused
        # uniform weight matrix (nvis/nhid/W_shape/lim -> theano.shared) and
        # an unused broadcastable `hbias`; neither was referenced again, and
        # self.W / self.b below are created independently.
        self.init = initializations.get(init)
        self.activation = activations.get(activation)
        self.input_dim = input_dim

        self.hidden_dim = hidden_dim
        # Reconstruction has the same size as the input.
        self.output_dim = input_dim

        self.input = T.matrix()

        # maybe need to replace the initialization function
        self.W = self.init((self.input_dim, self.hidden_dim))
        # (x,) is the intended shape tuple; the original's (x) was just an int.
        self.b = shared_zeros((self.hidden_dim,))
        #self.b_tilde = shared_zeros((self.input_dim))

        self.params = [self.W, self.b]

        if weights is not None:
            self.set_weights(weights)
    def __init__(self, activation='tanh', kernel_initializer='glorot_uniform',
                 bias_initializer='zeros', **kwargs):
        """Build the layer, resolving the activation by name.

        ``activation`` is looked up via ``activations.get``; the two
        initializer arguments are stored as given. Remaining keyword
        arguments are forwarded to the base layer constructor.
        """
        super(MyLayer_one, self).__init__(**kwargs)
        # Initializers are kept as raw specs; only the activation is resolved.
        self.kernel_initializer = kernel_initializer
        self.bias_initializer = bias_initializer
        self.activation = activations.get(activation)
Example no. 4
0
    def __init__(self, units, activation='tanh',
                 kernel_initializer='glorot_uniform', bias_initializer='zeros',
                 **kwargs):
        """Build the layer with ``units`` output units.

        The activation is resolved by name through ``activations.get``;
        initializer specs are stored unchanged. Extra keyword arguments go
        to the base layer constructor.
        """
        super(binary_indicator_layer, self).__init__(**kwargs)
        # Store configuration; only the activation needs name resolution.
        self.units = units
        self.kernel_initializer = kernel_initializer
        self.bias_initializer = bias_initializer
        self.activation = activations.get(activation)
Example no. 5
0
    def __init__(self, units, classes, activation='tanh',
                 kernel_initializer='glorot_uniform', bias_initializer='zeros',
                 **kwargs):
        """Build the layer with ``units`` outputs over ``classes`` classes.

        ``activation`` is resolved via ``activations.get``; initializer
        specs are stored as given. Remaining keyword arguments are passed
        through to the base layer constructor.
        """
        super(target_representation_layer, self).__init__(**kwargs)
        # Plain configuration storage; only the activation is resolved.
        self.units = units
        self.classes = classes
        self.kernel_initializer = kernel_initializer
        self.bias_initializer = bias_initializer
        self.activation = activations.get(activation)
Example no. 6
0
    def __init__(self, input_dim, proj_dim=128, init='uniform',
                 activation='sigmoid', weights=None):
        """Create separate pivot/context embeddings of size ``proj_dim``.

        Two distinct embedding matrices are allocated because the model
        distinguishes p(w|c) from p(c|w). ``weights``, if supplied, is
        forwarded to ``set_weights``.
        """
        super(NodeContextProduct, self).__init__()

        self.input_dim = input_dim
        self.proj_dim = proj_dim
        self.init = initializations.get(init)
        self.activation = activations.get(activation)

        # Integer matrix input: rows of (pivot, context) word indices.
        self.input = T.imatrix()

        # Two different embedding tables — one for the pivot word, one for
        # its context — because p(w|c) != p(c|w).
        embed_shape = (input_dim, proj_dim)
        self.W_w = self.init(embed_shape)
        self.W_c = self.init(embed_shape)

        self.params = [self.W_w, self.W_c]

        if weights is not None:
            self.set_weights(weights)
Example no. 7
0
 def __init__(self,
              activation0='tanh',
              activation1='softmax',
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              **kwargs):
     """Configure the attention-score layer's two activations.

     Both activations are resolved by name via ``activations.get``;
     initializer specs are stored unchanged. As in the original, the base
     constructor is invoked after the attributes are set.
     """
     self.kernel_initializer = kernel_initializer
     self.bias_initializer = bias_initializer
     self.activation0 = activations.get(activation0)
     self.activation1 = activations.get(activation1)
     super(SelfAttentionScore, self).__init__(**kwargs)