Example #1
    def __init__(self, inpt, nin, nunits, conv_sz=1,
                 learn_init_state=True):
        # inpt is transposed a priori
        tablet_wd, _ = inpt.shape
        if conv_sz > 1:
            # Clip trailing rows so the width divides evenly, then fold
            # conv_sz consecutive rows into one wider input row.
            inpt_clipped = inpt[:conv_sz * (tablet_wd // conv_sz), :]
            inpt_conv = inpt_clipped.reshape(
                (tablet_wd // conv_sz, nin * conv_sz))
        else:
            inpt_conv = inpt

        wio = share(init_wts(nin * conv_sz, nunits))  # input to output
        woo = share(init_wts(nunits, nunits))  # output to output
        bo = share(init_wts(nunits))  # output bias
        h0 = share(init_wts(nunits))  # (learnable) initial state

        def step(in_t, out_tm1):
            # One recurrence: mix the previous output with the current
            # input row and squash through tanh.
            return tt.tanh(tt.dot(out_tm1, woo) + tt.dot(in_t, wio) + bo)

        # Iterate step over the rows of inpt_conv, seeded with h0.
        self.output, _ = theano.scan(
            step,
            sequences=[inpt_conv],
            outputs_info=[h0]
        )

        self.params = [wio, woo, bo]
        if learn_init_state:
            self.params += [h0]
        self.nout = nunits
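
The snippet relies on two helpers, share and init_wts, whose definitions are not shown on this page. Below is a minimal sketch of how they might look, plus a usage example; RecurrentLayer is a hypothetical name for the class holding the __init__ above, and the weight scale in init_wts is a guess.

import numpy as np
import theano
import theano.tensor as tt

def share(array, name=None):
    # Assumed helper: wrap a numpy array as a float Theano shared variable.
    return theano.shared(np.asarray(array, dtype=theano.config.floatX),
                         name=name)

def init_wts(*shape):
    # Assumed helper: small random initial weights.
    return 0.01 * np.random.standard_normal(shape)

inpt = tt.matrix('inpt')  # (width, nin), already transposed
layer = RecurrentLayer(inpt, nin=32, nunits=64, conv_sz=3)
f = theano.function([inpt], layer.output)
out = f(np.random.randn(90, 32).astype(theano.config.floatX))
print(out.shape)  # (30, 64): 90 rows folded in groups of conv_sz=3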
Example #2
    def __init__(self, inpt, in_sz, n_classes):
        b = share(init_wts(n_classes))         # class biases
        w = share(init_wts(in_sz, n_classes))  # input-to-class weights
        self.output = tt.nnet.softmax(tt.dot(inpt, w) + b)
        self.params = [w, b]
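
A minimal sketch of stacking this softmax layer on top of the recurrent layer from Example #1, reusing the assumed helpers above; SoftmaxLayer is a hypothetical name for the class holding this __init__.

inpt = tt.matrix('inpt')                       # (width, nin)
rnn = RecurrentLayer(inpt, nin=32, nunits=64)  # hypothetical class name
clf = SoftmaxLayer(rnn.output, rnn.nout, n_classes=10)
predict = theano.function([inpt], clf.output)
probs = predict(np.random.randn(90, 32).astype(theano.config.floatX))
print(probs.shape)  # (90, 10); each row is a probability distribution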