Code example #1
    def __init__(self, inpt, nin, nunits, conv_sz=1, learn_init_state=True):
        # inpt is transposed a priori
        tablet_wd, _ = inpt.shape
        if conv_sz > 1:
            inpt_clipped = inpt[:conv_sz * (tablet_wd // conv_sz), :]
            inpt_conv = inpt_clipped.reshape(
                (tablet_wd // conv_sz, nin * conv_sz))
        else:
            inpt_conv = inpt

        wio = share(init_wts(nin * conv_sz, nunits))  # input to output
        woo = share(init_wts(nunits, nunits))  # output to output
        bo = share(init_wts(nunits))
        h0 = share(init_wts(nunits))

        def step(in_t, out_tm1):
            return tt.tanh(tt.dot(out_tm1, woo) + tt.dot(in_t, wio) + bo)

        self.output, _ = theano.scan(step,
                                     sequences=[inpt_conv],
                                     outputs_info=[h0])

        self.params = [wio, woo, bo]
        if learn_init_state:
            self.params += [h0]
        self.nout = nunits
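A minimal usage sketch (not from the repository) for the recurrent layer above: share() and init_wts() are hypothetical stand-ins for the project's own helpers, and RecurrentLayer is an assumed name for the class this __init__ belongs to.

import numpy as np
import theano
import theano.tensor as tt

def share(arr):
    # stand-in helper: wrap a NumPy array as a Theano shared variable
    return theano.shared(np.asarray(arr, dtype=theano.config.floatX))

def init_wts(*shape):
    # stand-in helper: small random initial weights
    return 0.01 * np.random.randn(*shape)

x = tt.matrix('x')                           # (time, nin), already transposed
layer = RecurrentLayer(x, nin=8, nunits=16)  # assumes the __init__ above sits in a class named RecurrentLayer
run_fn = theano.function([x], layer.output)
hidden = run_fn(np.random.randn(20, 8).astype(theano.config.floatX))
print(hidden.shape)                          # (20, 16): one nunits-vector per time step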
Code example #2
File: recurrent.py  Project: Neuroschemata/Toy-RNN
    def __init__(self, inpt, nin, nunits, conv_sz=1,
                 learn_init_state=True):
        # inpt is transposed a priori
        tablet_wd, _ = inpt.shape
        if conv_sz > 1:
            inpt_clipped = inpt[:conv_sz * (tablet_wd // conv_sz), :]
            inpt_conv = inpt_clipped.reshape(
                (tablet_wd // conv_sz, nin * conv_sz))
        else:
            inpt_conv = inpt

        wio = share(init_wts(nin * conv_sz, nunits))  # input to output
        woo = share(init_wts(nunits, nunits))  # output to output
        bo = share(init_wts(nunits))
        h0 = share(init_wts(nunits))

        def step(in_t, out_tm1):
            return TT.tanh(TT.dot(out_tm1, woo) + TT.dot(in_t, wio) + bo)

        self.output, _ = theano.scan(
            step,
            sequences=[inpt_conv],
            outputs_info=[h0]
        )

        self.params = [wio, woo, bo]
        if learn_init_state:
            self.params += [h0]
        self.nout = nunits
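The conv_sz branch in both examples stacks conv_sz consecutive time steps into one wider input frame, which shortens the sequence the scan has to walk. A plain-NumPy sketch of that reshape, with illustrative values only:

import numpy as np

nin, conv_sz = 3, 2
inpt = np.arange(7 * nin).reshape(7, nin)             # 7 time steps, 3 features each
clipped = inpt[:conv_sz * (len(inpt) // conv_sz), :]  # drop the odd trailing step
stacked = clipped.reshape(len(inpt) // conv_sz, nin * conv_sz)
print(stacked.shape)                                  # (3, 6): half as many steps, twice as wide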
Code example #3
    def __init__(self, inpt, in_sz, n_classes, tied=False):
        if tied:
            b = share(init_wts(n_classes-1))
            w = share(init_wts(in_sz, n_classes-1))
            w1 = TT.horizontal_stack(w, TT.zeros((in_sz, 1)))
            b1 = TT.concatenate((b, TT.zeros(1)))
            self.output = TT.dot(inpt, w1) + b1
        else:
            b = share(init_wts(n_classes))
            w = share(init_wts(in_sz, n_classes))
            self.output = TT.dot(inpt, w) + b
        self.params = [w, b]
Code example #4
File: outlayers.py  Project: wenmengzhou/rnn_ctc
    def __init__(self, inpt, in_sz, n_classes, tied=False):
        if tied:
            b = share(init_wts(n_classes-1))
            w = share(init_wts(in_sz, n_classes-1))
            w1 = tt.horizontal_stack(w, tt.zeros((in_sz, 1)))
            b1 = tt.concatenate((b, tt.zeros(1)))
            self.output = tt.dot(inpt, w1) + b1
        else:
            b = share(init_wts(n_classes))
            w = share(init_wts(in_sz, n_classes))
            self.output = tt.dot(inpt, w) + b
        self.params = [w, b]
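A minimal usage sketch for the tied variant of this output layer, under the same assumptions as before: share() and init_wts() are hypothetical stand-ins, and OutputLayer is an assumed name for the class this __init__ belongs to. In the tied branch, the last class's weight column and bias are pinned to zero, so only n_classes-1 of them are learned.

import numpy as np
import theano
import theano.tensor as tt

def share(arr):
    # stand-in helper: wrap a NumPy array as a Theano shared variable
    return theano.shared(np.asarray(arr, dtype=theano.config.floatX))

def init_wts(*shape):
    # stand-in helper: small random initial weights
    return 0.01 * np.random.randn(*shape)

feats = tt.matrix('feats')                                 # (time, in_sz) activations from the layer below
out = OutputLayer(feats, in_sz=16, n_classes=5, tied=True)
score_fn = theano.function([feats], out.output)
scores = score_fn(np.random.randn(20, 16).astype(theano.config.floatX))
print(scores.shape)                                        # (20, 5): one score per class per time step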