Example #1
    def __init__(self, num_input, num_cells, input_layer=None, name=""):
        """
        LSTM Layer
        Takes a sequence of inputs and returns a sequence of outputs.
        """

        self.name = name
        self.num_input = num_input
        self.num_cells = num_cells

        self.X = input_layer.output()
        self.h0 = theano.shared(floatX(np.zeros(num_cells)))
        self.s0 = theano.shared(floatX(np.zeros(num_cells)))

        self.W_gx = random_weights((num_input, num_cells))
        self.W_ix = random_weights((num_input, num_cells))
        self.W_fx = random_weights((num_input, num_cells))
        self.W_ox = random_weights((num_input, num_cells))

        self.W_gh = random_weights((num_cells, num_cells))
        self.W_ih = random_weights((num_cells, num_cells))
        self.W_fh = random_weights((num_cells, num_cells))
        self.W_oh = random_weights((num_cells, num_cells))

        self.b_g = zeros(num_cells)
        self.b_i = zeros(num_cells)
        self.b_f = zeros(num_cells)
        self.b_o = zeros(num_cells)

        self.params = [self.W_gx, self.W_ix, self.W_ox, self.W_fx,
                        self.W_gh, self.W_ih, self.W_oh, self.W_fh,
                        self.b_g, self.b_i, self.b_f, self.b_o,
                ]
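All of these constructors rely on helpers such as floatX, random_weights, and zeros that are defined elsewhere in the source project. A minimal sketch of what they likely look like; the exact initialization scale is an assumption, not the original code:

    import numpy as np
    import theano

    def floatX(arr):
        # Cast to Theano's configured float type (float32 by default on GPU)
        return np.asarray(arr, dtype=theano.config.floatX)

    def random_weights(shape, name=None):
        # Shared variable with small random values; the 0.01 scale is an assumption
        return theano.shared(floatX(np.random.randn(*shape) * 0.01), name=name)

    def zeros(shape, name=None):
        # Zero-initialized shared variable, used for biases and initial states
        return theano.shared(floatX(np.zeros(shape)), name=name)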
Example #2
    def __init__(self, num_input, num_cells, input_layers=None, name="", go_backwards=False):
        """
        GRU Layer
        Takes a sequence of inputs and returns a sequence of outputs.
        """

        self.name = name
        self.num_input = num_input
        self.num_cells = num_cells

        if len(input_layers) >= 2:
            self.X = T.concatenate([input_layer.output() for input_layer in input_layers], axis=1)
        else:
            self.X = input_layers[0].output()

        self.s0 = zeros(num_cells)
        self.go_backwards = go_backwards

        self.U_z = random_weights((num_input, num_cells), name=self.name+"U_z")
        self.W_z = random_weights((num_cells, num_cells), name=self.name+"W_z")
        self.U_r = random_weights((num_input, num_cells), name=self.name+"U_r")
        self.W_r = random_weights((num_cells, num_cells), name=self.name+"W_r")
        self.U_h = random_weights((num_input, num_cells), name=self.name+"U_h")
        self.W_h = random_weights((num_cells, num_cells), name=self.name+"W_h")
        self.b_z = zeros(num_cells, name=self.name+"b_z")
        self.b_r = zeros(num_cells, name=self.name+"b_r")
        self.b_h = zeros(num_cells, name=self.name+"b_h")

        self.params = [ self.U_z, self.W_z, self.U_r,
                        self.W_r, self.U_h, self.W_h,
                        self.b_z, self.b_r, self.b_h
                    ]

        self.output()
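The GRU constructor ends by calling self.output(), which is not included in this snippet. A rough sketch of the recurrence it presumably wraps, assuming a per-timestep step method and theano.scan over the input sequence; the gating formulation below is inferred from the parameter names, not taken from the original code:

    def step(self, x, s_prev):
        # Update gate z, reset gate r, then the candidate state h
        z = T.nnet.sigmoid(T.dot(x, self.U_z) + T.dot(s_prev, self.W_z) + self.b_z)
        r = T.nnet.sigmoid(T.dot(x, self.U_r) + T.dot(s_prev, self.W_r) + self.b_r)
        h = T.tanh(T.dot(x, self.U_h) + T.dot(r * s_prev, self.W_h) + self.b_h)
        return (1 - z) * h + z * s_prev

    def output(self):
        states, _ = theano.scan(fn=self.step,
                                sequences=self.X,
                                outputs_info=[self.s0],
                                go_backwards=self.go_backwards)
        return states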
Example #3
    def __init__(self, num_input, num_cells, input_layer=None, name=""):
        """
        LSTM Layer
        Takes a sequence of inputs and returns a sequence of outputs.
        """

        self.name = name
        self.num_input = num_input
        self.num_cells = num_cells

        self.X = input_layer.output()
        self.h0 = theano.shared(floatX(np.zeros(num_cells)))
        self.s0 = theano.shared(floatX(np.zeros(num_cells)))

        self.W_gx = random_weights((num_input, num_cells))
        self.W_ix = random_weights((num_input, num_cells))
        self.W_fx = random_weights((num_input, num_cells))
        self.W_ox = random_weights((num_input, num_cells))

        self.W_gh = random_weights((num_cells, num_cells))
        self.W_ih = random_weights((num_cells, num_cells))
        self.W_fh = random_weights((num_cells, num_cells))
        self.W_oh = random_weights((num_cells, num_cells))

        self.b_g = zeros(num_cells)
        self.b_i = zeros(num_cells)
        self.b_f = zeros(num_cells)
        self.b_o = zeros(num_cells)

        self.params = [self.W_gx, self.W_ix, self.W_ox, self.W_fx,
                        self.W_gh, self.W_ih, self.W_oh, self.W_fh,
                        self.b_g, self.b_i, self.b_f, self.b_o,
                ]
Example #4
    def __init__(self, num_input, num_cells, input_layer=None, name=""):
        """
        LSTM Layer
        Takes a sequence of inputs and returns a sequence of outputs.

        Currently takes only one input layer
        """
        self.name = name
        self.num_input = num_input
        self.num_cells = num_cells

        # Set X to the output of the input layer
        self.X = input_layer.output()

        self.h0 = theano.shared(floatX(np.zeros((1, num_cells))))
        self.s0 = theano.shared(floatX(np.zeros((1, num_cells))))

        # Initialize the weights
        self.W_gx = random_weights((num_input, num_cells),
                                   name=self.name + "W_gx")
        self.W_ix = random_weights((num_input, num_cells),
                                   name=self.name + "W_ix")
        self.W_fx = random_weights((num_input, num_cells),
                                   name=self.name + "W_fx")
        self.W_ox = random_weights((num_input, num_cells),
                                   name=self.name + "W_ox")

        self.W_gh = random_weights((num_cells, num_cells),
                                   name=self.name + "W_gh")
        self.W_ih = random_weights((num_cells, num_cells),
                                   name=self.name + "W_ih")
        self.W_fh = random_weights((num_cells, num_cells),
                                   name=self.name + "W_fh")
        self.W_oh = random_weights((num_cells, num_cells),
                                   name=self.name + "W_oh")

        self.b_g = zeros(num_cells, name=self.name + "b_g")
        self.b_i = zeros(num_cells, name=self.name + "b_i")
        self.b_f = zeros(num_cells, name=self.name + "b_f")
        self.b_o = zeros(num_cells, name=self.name + "b_o")

        self.params = [
            self.W_gx,
            self.W_ix,
            self.W_ox,
            self.W_fx,
            self.W_gh,
            self.W_ih,
            self.W_oh,
            self.W_fh,
            self.b_g,
            self.b_i,
            self.b_f,
            self.b_o,
        ]

        self.output()
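As with the GRU example, the trailing self.output() call refers to a method that is not part of the snippet. A sketch of the LSTM recurrence the parameter names suggest; this is a minimal reading, not the original implementation:

    def step(self, x, h_prev, s_prev):
        # Candidate input g and the input/forget/output gates
        g = T.tanh(T.dot(x, self.W_gx) + T.dot(h_prev, self.W_gh) + self.b_g)
        i = T.nnet.sigmoid(T.dot(x, self.W_ix) + T.dot(h_prev, self.W_ih) + self.b_i)
        f = T.nnet.sigmoid(T.dot(x, self.W_fx) + T.dot(h_prev, self.W_fh) + self.b_f)
        o = T.nnet.sigmoid(T.dot(x, self.W_ox) + T.dot(h_prev, self.W_oh) + self.b_o)
        s = f * s_prev + i * g      # new cell state
        h = o * T.tanh(s)           # new hidden state
        return h, s

    def output(self):
        (hidden, _), _ = theano.scan(fn=self.step,
                                     sequences=self.X,
                                     outputs_info=[self.h0, self.s0])
        return hidden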
Example #5
 def __init__(self, num_input, num_output, input_layer, name=""):
     self.num_input = num_input
     self.num_output = num_output
     self.X = input_layer.output()
     self.W_yh = random_weights((num_input, num_output), name="W_yh")
     self.b_y = zeros(num_output, name="b_y")
     self.params = [self.W_yh, self.b_y]
Example #6
 def __init__(self, num_input, num_output, input_layer, name=""):
     self.num_input = num_input
     self.num_output = num_output
     self.X = input_layer.output()
     self.W_yh = random_weights((num_input, num_output), name="W_yh")
     self.b_y = zeros(num_output, name="b_y")
     self.params = [self.W_yh, self.b_y]
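Examples #5 and #6 are identical fully connected output layers; their output() method is not shown. A plausible forward pass, where the absence of a nonlinearity is an assumption:

    def output(self):
        # Affine projection of the previous layer's output
        return T.dot(self.X, self.W_yh) + self.b_y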
Example #7
    def __init__(self,
                 num_input,
                 num_cells,
                 input_layers=None,
                 name="",
                 go_backwards=False):
        """
        GRU Layer
        Takes a sequence of inputs and returns a sequence of outputs.
        """

        self.name = name
        self.num_input = num_input
        self.num_cells = num_cells

        if len(input_layers) >= 2:
            self.X = T.concatenate(
                [input_layer.output() for input_layer in input_layers], axis=1)
        else:
            self.X = input_layers[0].output()

        self.s0 = zeros(num_cells)
        self.go_backwards = go_backwards

        self.U_z = random_weights((num_input, num_cells),
                                  name=self.name + "U_z")
        self.W_z = random_weights((num_cells, num_cells),
                                  name=self.name + "W_z")
        self.U_r = random_weights((num_input, num_cells),
                                  name=self.name + "U_r")
        self.W_r = random_weights((num_cells, num_cells),
                                  name=self.name + "W_r")
        self.U_h = random_weights((num_input, num_cells),
                                  name=self.name + "U_h")
        self.W_h = random_weights((num_cells, num_cells),
                                  name=self.name + "W_h")
        self.b_z = zeros(num_cells, name=self.name + "b_z")
        self.b_r = zeros(num_cells, name=self.name + "b_r")
        self.b_h = zeros(num_cells, name=self.name + "b_h")

        self.params = [
            self.U_z, self.W_z, self.U_r, self.W_r, self.U_h, self.W_h,
            self.b_z, self.b_r, self.b_h
        ]

        self.output()
Example #8
    def __init__(self, num_input, num_output, input_layers, name=""):

        if len(input_layers) >= 2:
            self.X = T.concatenate([input_layer.output() for input_layer in input_layers], axis=1)
        else:
            self.X = input_layers[0].output()
        self.W_yh = random_weights((num_input, num_output), name="W_yh")
        self.b_y = zeros(num_output, name="b_y")
        self.params = [self.W_yh, self.b_y]
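When several input layers are concatenated along axis=1, num_input has to equal the sum of their output widths, otherwise the dot product with W_yh fails when the graph is built. A hypothetical wiring; the class name, sizes, and layer variables below are illustrative only:

    # Two 256-cell recurrent layers feeding one output layer over a 512-wide input
    out = FullyConnectedLayer(num_input=512, num_output=num_classes,
                              input_layers=[lstm_forward, lstm_backward])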
Example #9
    def __init__(self, num_input, num_output, input_layer, temperature=1.0, name=""):
        self.name = name
        self.X = input_layer.output()
        self.params = []
        self.temp = temperature
        self.W_yh = random_weights((num_input, num_output))
        self.b_y = zeros(num_output)

        self.params = [self.W_yh, self.b_y]
Example #10
    def __init__(self, num_input, num_output, input_layer, temperature=1.0, name=""):
        self.name = name
        self.X = input_layer.output()
        self.params = []
        self.temp = temperature

        self.W_yh = random_weights((num_input, num_output))
        self.b_y = zeros(num_output)

        self.params = [self.W_yh, self.b_y]
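Examples #9 and #10 add a sampling temperature to what is presumably a softmax output layer. A minimal sketch of how the temperature would enter the forward pass; the exact placement of the division is an assumption:

    def output(self):
        # Dividing the logits by the temperature flattens the distribution
        # when temp > 1 and sharpens it when temp < 1
        logits = (T.dot(self.X, self.W_yh) + self.b_y) / self.temp
        return T.nnet.softmax(logits)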
Example #11
    def __init__(self, num_input, num_output, input_layer, name=""):

        self.X = input_layer.output()
        self.num_input = num_input
        self.num_output = num_output

        self.W = random_weights((num_input, num_output))
        self.b = zeros(num_output)

        self.params = [self.W, self.b]
Example #12
    def __init__(self, num_input, num_output, input_layers, name=""):

        if len(input_layers) >= 2:
            self.X = T.concatenate(
                [input_layer.output() for input_layer in input_layers], axis=1)
        else:
            self.X = input_layers[0].output()
        self.W_yh = random_weights((num_input, num_output), name="W_yh")
        self.b_y = zeros(num_output, name="b_y")
        self.params = [self.W_yh, self.b_y]
Example #13
    def __init__(self, num_input, num_output, input_layer, name=""):

        self.X = input_layer.output()
        self.num_input = num_input
        self.num_output = num_output

        self.W = random_weights((num_input, num_output))
        self.b = zeros(num_output)

        self.params = [self.W, self.b]
Example #14
    def __init__(self, num_input, num_output, input_layers, name=""):

        if len(input_layers) >= 2:
            print "number of input layers: %s" % len(input_layers)
            print "len of list comprehension: %s" % len([input_layer.output() for input_layer in input_layers])
            self.X = T.concatenate([input_layer.output() for input_layer in input_layers], axis=1)
        else:
            self.X = input_layers[0].output()
        self.W_yh = random_weights((num_input, num_output), name="W_yh")
        self.b_y = zeros(num_output, name="b_y")
        self.params = [self.W_yh, self.b_y]
Example #15
    def __init__(self, num_input, num_cells, input_layers=None, name="", go_backwards=False):
        """
        LSTM Layer
        Takes a sequence of inputs and returns a sequence of outputs.
        """

        self.name = name
        self.num_input = num_input
        self.num_cells = num_cells

        if len(input_layers) >= 2:
            self.X = T.concatenate([input_layer.output() for input_layer in input_layers], axis=1)
        else:
            self.X = input_layers[0].output()

        self.h0 = theano.shared(floatX(np.zeros(num_cells)))
        self.s0 = theano.shared(floatX(np.zeros(num_cells)))

        self.go_backwards = go_backwards

        self.W_gx = random_weights((num_input, num_cells), name=self.name+"W_gx")
        self.W_ix = random_weights((num_input, num_cells), name=self.name+"W_ix")
        self.W_fx = random_weights((num_input, num_cells), name=self.name+"W_fx")
        self.W_ox = random_weights((num_input, num_cells), name=self.name+"W_ox")

        self.W_gh = random_weights((num_cells, num_cells), name=self.name+"W_gh")
        self.W_ih = random_weights((num_cells, num_cells), name=self.name+"W_ih")
        self.W_fh = random_weights((num_cells, num_cells), name=self.name+"W_fh")
        self.W_oh = random_weights((num_cells, num_cells), name=self.name+"W_oh")

        self.b_g = zeros(num_cells, name=self.name+"b_g")
        self.b_i = zeros(num_cells, name=self.name+"b_i")
        self.b_f = zeros(num_cells, name=self.name+"b_f")
        self.b_o = zeros(num_cells, name=self.name+"b_o")

        self.params = [self.W_gx, self.W_ix, self.W_ox, self.W_fx,
                        self.W_gh, self.W_ih, self.W_oh, self.W_fh,
                        self.b_g, self.b_i, self.b_f, self.b_o,
                ]

        self.output()
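This variant adds a go_backwards flag, which would normally be forwarded to theano.scan. Note that scan returns its outputs in iteration order, so a backwards pass yields a reversed sequence; a sketch of an output() that restores time alignment (again an assumption, not the original code, and step refers to the LSTM step sketched earlier):

    def output(self):
        (hidden, _), _ = theano.scan(fn=self.step,
                                     sequences=self.X,
                                     outputs_info=[self.h0, self.s0],
                                     go_backwards=self.go_backwards)
        # scan walked the sequence in reverse, so flip the outputs back
        return hidden[::-1] if self.go_backwards else hidden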
Example #16
    def __init__(self, num_input, num_cells, input_layer=None, name=""):
        """
        LSTM Layer
        Takes a sequence of inputs and returns a sequence of outputs.

        Currently takes only one input layer
        """
        self.name = name
        self.num_input = num_input
        self.num_cells = num_cells

        # Set X to the output of the input layer
        self.X = input_layer.output()

        self.h0 = theano.shared(floatX(np.zeros((1, num_cells))))
        self.s0 = theano.shared(floatX(np.zeros((1, num_cells))))


        # Initialize the weights
        self.W_gx = random_weights((num_input, num_cells), name=self.name+"W_gx")
        self.W_ix = random_weights((num_input, num_cells), name=self.name+"W_ix")
        self.W_fx = random_weights((num_input, num_cells), name=self.name+"W_fx")
        self.W_ox = random_weights((num_input, num_cells), name=self.name+"W_ox")

        self.W_gh = random_weights((num_cells, num_cells), name=self.name+"W_gh")
        self.W_ih = random_weights((num_cells, num_cells), name=self.name+"W_ih")
        self.W_fh = random_weights((num_cells, num_cells), name=self.name+"W_fh")
        self.W_oh = random_weights((num_cells, num_cells), name=self.name+"W_oh")

        self.b_g = zeros(num_cells, name=self.name+"b_g")
        self.b_i = zeros(num_cells, name=self.name+"b_i")
        self.b_f = zeros(num_cells, name=self.name+"b_f")
        self.b_o = zeros(num_cells, name=self.name+"b_o")

        self.params = [self.W_gx, self.W_ix, self.W_ox, self.W_fx,
                        self.W_gh, self.W_ih, self.W_oh, self.W_fh,
                        self.b_g, self.b_i, self.b_f, self.b_o,]

        self.output()
Example #17
    def __init__(self, num_input, num_output, input_layers, name=""):

        if len(input_layers) >= 2:
            print "number of input layers: %s" % len(input_layers)
            print "len of list comprehension: %s" % len(
                [input_layer.output() for input_layer in input_layers])
            self.X = T.concatenate(
                [input_layer.output() for input_layer in input_layers], axis=1)
        else:
            self.X = input_layers[0].output()
        self.W_yh = random_weights((num_input, num_output), name="W_yh")
        self.b_y = zeros(num_output, name="b_y")
        self.params = [self.W_yh, self.b_y]
Example #18
 def reset_state(self):
     self.W_yh = random_weights((self.num_input, self.num_output),
                                name="W_yh")
     self.b_y = zeros(self.num_output, name="b_y")
     self.params = [self.W_yh, self.b_y]
Example #19
 def reset_state(self):
     self.W_yh = random_weights((self.num_input, self.num_output), name="W_yh")
     self.b_y = zeros(self.num_output, name="b_y")
     self.params = [self.W_yh, self.b_y]
Example #20
 def reset_state(self):
     self.s0 = zeros(self.num_cells)
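The reset_state methods differ by layer type: the fully connected layer in Examples #18 and #19 re-draws its weights, while the recurrent layer here only zeroes its initial state. A hypothetical caller that clears state between independent sequences; the model object and its layers attribute are illustrative only:

    # Illustrative only: reset every layer that keeps per-sequence state
    for layer in model.layers:
        if hasattr(layer, "reset_state"):
            layer.reset_state()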
Example #21
    def __init__(self,
                 num_input,
                 num_cells,
                 input_layers=None,
                 name="",
                 go_backwards=False):
        """
        LSTM Layer
        Takes a sequence of inputs and returns a sequence of outputs.
        """

        self.name = name
        self.num_input = num_input
        self.num_cells = num_cells

        if len(input_layers) >= 2:
            self.X = T.concatenate(
                [input_layer.output() for input_layer in input_layers], axis=1)
        else:
            self.X = input_layers[0].output()

        self.h0 = theano.shared(floatX(np.zeros(num_cells)))
        self.s0 = theano.shared(floatX(np.zeros(num_cells)))

        self.go_backwards = go_backwards

        self.W_gx = random_weights((num_input, num_cells),
                                   name=self.name + "W_gx")
        self.W_ix = random_weights((num_input, num_cells),
                                   name=self.name + "W_ix")
        self.W_fx = random_weights((num_input, num_cells),
                                   name=self.name + "W_fx")
        self.W_ox = random_weights((num_input, num_cells),
                                   name=self.name + "W_ox")

        self.W_gh = random_weights((num_cells, num_cells),
                                   name=self.name + "W_gh")
        self.W_ih = random_weights((num_cells, num_cells),
                                   name=self.name + "W_ih")
        self.W_fh = random_weights((num_cells, num_cells),
                                   name=self.name + "W_fh")
        self.W_oh = random_weights((num_cells, num_cells),
                                   name=self.name + "W_oh")

        self.b_g = zeros(num_cells, name=self.name + "b_g")
        self.b_i = zeros(num_cells, name=self.name + "b_i")
        self.b_f = zeros(num_cells, name=self.name + "b_f")
        self.b_o = zeros(num_cells, name=self.name + "b_o")

        self.params = [
            self.W_gx,
            self.W_ix,
            self.W_ox,
            self.W_fx,
            self.W_gh,
            self.W_ih,
            self.W_oh,
            self.W_fh,
            self.b_g,
            self.b_i,
            self.b_f,
            self.b_o,
        ]

        self.output()
Example #22
 def reset_state(self):
     self.s0 = zeros(self.num_cells)