def __init__(self, num_input, num_output, input_layers, name=""):
    self.name = name
    if len(input_layers) >= 2:
        print("number of input layers: %s" % len(input_layers))
        print("len of list comprehension: %s" %
              len([input_layer.output() for input_layer in input_layers]))
        self.X = T.concatenate(
            [input_layer.output() for input_layer in input_layers], axis=1)
    else:
        self.X = input_layers[0].output()
    self.W_yh = random_weights((num_input, num_output), name=name + "_W_yh")
    self.b_y = random_weights((num_output,), name=name + "_b_y")
    self.params = [self.W_yh]
    self.bias = [self.b_y]
def __init__(self, num_input, num_cells, input_layer=None, name=""):
    """
    LSTM Layer
    Takes a sequence of inputs, returns a sequence of outputs
    """
    self.name = name
    self.num_input = num_input
    self.num_cells = num_cells

    self.X = input_layer.output()
    self.h0 = theano.shared(floatX(np.zeros(num_cells)))
    self.s0 = theano.shared(floatX(np.zeros(num_cells)))

    # Input-to-cell weights (g: candidate, i: input gate,
    # f: forget gate, o: output gate)
    self.W_gx = random_weights((num_input, num_cells))
    self.W_ix = random_weights((num_input, num_cells))
    self.W_fx = random_weights((num_input, num_cells))
    self.W_ox = random_weights((num_input, num_cells))

    # Recurrent (hidden-to-cell) weights
    self.W_gh = random_weights((num_cells, num_cells))
    self.W_ih = random_weights((num_cells, num_cells))
    self.W_fh = random_weights((num_cells, num_cells))
    self.W_oh = random_weights((num_cells, num_cells))

    self.b_g = zeros(num_cells)
    self.b_i = zeros(num_cells)
    self.b_f = zeros(num_cells)
    self.b_o = zeros(num_cells)

    self.params = [self.W_gx, self.W_ix, self.W_ox, self.W_fx,
                   self.W_gh, self.W_ih, self.W_oh, self.W_fh,
                   self.b_g, self.b_i, self.b_f, self.b_o]
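# The constructor above only allocates parameters; the recurrence itself
# lives in an output() method that is not shown here. Below is a minimal
# sketch of what such a method could look like, assuming the standard LSTM
# equations and a (time, features) input matrix X. The helper name
# lstm_output_sketch and the step() function are hypothetical, not from
# the original source.
import theano
import theano.tensor as T

def lstm_output_sketch(layer):
    def step(x_t, h_tm1, s_tm1):
        g = T.tanh(T.dot(x_t, layer.W_gx) + T.dot(h_tm1, layer.W_gh) + layer.b_g)
        i = T.nnet.sigmoid(T.dot(x_t, layer.W_ix) + T.dot(h_tm1, layer.W_ih) + layer.b_i)
        f = T.nnet.sigmoid(T.dot(x_t, layer.W_fx) + T.dot(h_tm1, layer.W_fh) + layer.b_f)
        o = T.nnet.sigmoid(T.dot(x_t, layer.W_ox) + T.dot(h_tm1, layer.W_oh) + layer.b_o)
        s_t = f * s_tm1 + i * g   # new cell state
        h_t = o * T.tanh(s_t)     # new hidden state
        return h_t, s_t

    [h, s], _ = theano.scan(step,
                            sequences=layer.X,
                            outputs_info=[layer.h0, layer.s0])
    return h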
def __init__(self, input_layer, filter_shape, subsample=(1, 1), name="",
             border_mode='valid'):
    self.X = input_layer.output()
    self.border_mode = border_mode
    self.name = name
    self.subsample = subsample
    self.filter_shape = filter_shape

    # Glorot/Xavier-style uniform initialization for the filters
    fan_in = np.prod(filter_shape[1:])
    fan_out = (filter_shape[0] * np.prod(filter_shape[2:]) /
               np.prod(subsample))
    W_bound = np.sqrt(6. / (fan_in + fan_out))
    rng = np.random.RandomState(23455)
    self.W = theano.shared(np.asarray(rng.uniform(low=-W_bound,
                                                  high=W_bound,
                                                  size=filter_shape),
                                      dtype=theano.config.floatX),
                           name=name + " W", borrow=True)
    self.b = random_weights((filter_shape[0],), name=name + " bias")

    self.params = [self.W]
    self.bias = [self.b]
    self.get_weight = theano.function([], self.W)
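# As above, output() is not shown. A hedged sketch of how this layer's
# forward pass could be written with Theano's conv2d, assuming a 4D input
# of shape (batch, channels, rows, cols); conv_output_sketch is a
# hypothetical helper, not part of the original code.
from theano.tensor.nnet import conv2d

def conv_output_sketch(layer):
    conved = conv2d(layer.X, layer.W,
                    filter_shape=layer.filter_shape,
                    border_mode=layer.border_mode,
                    subsample=layer.subsample)
    # Broadcast the per-filter bias over batch and spatial dimensions
    return conved + layer.b.dimshuffle('x', 0, 'x', 'x')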
def __init__(self, num_input, num_output, input_layer, name=""): self.num_input = num_input self.num_output = num_output self.X = input_layer.output() self.W_yh = random_weights((num_input, num_output),name="W_yh") self.b_y = zeros(num_output, name="b_y") self.params = [self.W_yh, self.b_y]
def __init__(self, num_input, num_output, input_layer, name=""): self.num_input = num_input self.num_output = num_output self.X = input_layer.output() self.W_yh = random_weights((num_input, num_output), name="W_yh") self.b_y = zeros(num_output, name="b_y") self.params = [self.W_yh, self.b_y]
def __init__(self, num_input, num_cells, input_layer=None, name=""):
    """
    LSTM Layer
    Takes a sequence of inputs, returns a sequence of outputs
    Currently takes only one input layer
    """
    self.name = name
    self.num_input = num_input
    self.num_cells = num_cells

    # Setting X as the input layer's output
    self.X = input_layer.output()
    self.h0 = theano.shared(floatX(np.zeros((1, num_cells))))
    self.s0 = theano.shared(floatX(np.zeros((1, num_cells))))

    # Initializing the weights
    self.W_gx = random_weights((num_input, num_cells), name=self.name + "W_gx")
    self.W_ix = random_weights((num_input, num_cells), name=self.name + "W_ix")
    self.W_fx = random_weights((num_input, num_cells), name=self.name + "W_fx")
    self.W_ox = random_weights((num_input, num_cells), name=self.name + "W_ox")
    self.W_gh = random_weights((num_cells, num_cells), name=self.name + "W_gh")
    self.W_ih = random_weights((num_cells, num_cells), name=self.name + "W_ih")
    self.W_fh = random_weights((num_cells, num_cells), name=self.name + "W_fh")
    self.W_oh = random_weights((num_cells, num_cells), name=self.name + "W_oh")

    self.b_g = zeros(num_cells, name=self.name + "b_g")
    self.b_i = zeros(num_cells, name=self.name + "b_i")
    self.b_f = zeros(num_cells, name=self.name + "b_f")
    self.b_o = zeros(num_cells, name=self.name + "b_o")

    self.params = [self.W_gx, self.W_ix, self.W_ox, self.W_fx,
                   self.W_gh, self.W_ih, self.W_oh, self.W_fh,
                   self.b_g, self.b_i, self.b_f, self.b_o]

    self.output()
def __init__(self, num_input, num_output, input_layers, name=""):
    if len(input_layers) >= 2:
        self.X = T.concatenate(
            [input_layer.output() for input_layer in input_layers], axis=1)
    else:
        self.X = input_layers[0].output()
    self.W_yh = random_weights((num_input, num_output), name="W_yh")
    self.b_y = zeros(num_output, name="b_y")
    self.params = [self.W_yh, self.b_y]
def __init__(self, num_input, num_output, input_layer, temperature=1.0,
             name=""):
    self.name = name
    self.X = input_layer.output()
    self.temp = temperature
    self.W_yh = random_weights((num_input, num_output))
    self.b_y = zeros(num_output)
    self.params = [self.W_yh, self.b_y]
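# A hedged sketch of how a temperature-scaled softmax output could be
# computed from these parameters. The original output() method is not
# shown, and where it applies self.temp is an assumption; dividing the
# logits by the temperature is the usual convention.
import theano.tensor as T

def softmax_output_sketch(layer):
    logits = T.dot(layer.X, layer.W_yh) + layer.b_y
    return T.nnet.softmax(logits / layer.temp)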
def __init__(self, input_layer, num_input, num_output, name=""):
    self.X = input_layer.output()
    self.name = name
    # Glorot/Xavier-style uniform initialization
    W_bound = np.sqrt(6. / (num_input + num_output))
    rng = np.random.RandomState(23495)
    self.W = theano.shared(np.asarray(rng.uniform(low=-W_bound,
                                                  high=W_bound,
                                                  size=(num_input, num_output)),
                                      dtype=theano.config.floatX),
                           name=name + " W", borrow=True)
    self.b = random_weights((num_output,), name=name + "_b")
    self.params = [self.W]
    self.bias = [self.b]
def __init__(self, num_input, num_output, input_layer, name=""):
    self.X = input_layer.output()
    self.num_input = num_input
    self.num_output = num_output
    self.W = random_weights((num_input, num_output))
    self.b = zeros(num_output)
    self.params = [self.W, self.b]
def __init__(self, input_layer, num_input, num_cells, batch_size=8, name="",
             go_backwards=False, return_sequences=False):
    """
    LSTM Layer
    """
    self.name = name
    self.num_input = num_input
    self.num_cells = num_cells
    self.return_sequences = return_sequences
    self.X = input_layer.output()
    self.h0 = theano.shared(floatX(np.zeros(num_cells)))
    self.s0 = theano.shared(floatX(np.zeros(num_cells)))
    self.go_backwards = go_backwards

    rng = np.random.RandomState(23456)

    def shared_uniform(bound, size, suffix):
        return theano.shared(np.asarray(rng.uniform(low=-bound, high=bound,
                                                    size=size),
                                        dtype=theano.config.floatX),
                             name=name + suffix, borrow=True)

    # Input-to-cell weights, with a Glorot-style bound for (num_input, num_cells)
    W_bound_sx = np.sqrt(6. / (num_input + num_cells))
    self.W_gx = shared_uniform(W_bound_sx, (num_input, num_cells), " W_gx")
    self.W_ix = shared_uniform(W_bound_sx, (num_input, num_cells), " W_ix")
    self.W_fx = shared_uniform(W_bound_sx, (num_input, num_cells), " W_fx")
    self.W_ox = shared_uniform(W_bound_sx, (num_input, num_cells), " W_ox")

    # Recurrent weights, with a separate bound for (num_cells, num_cells)
    W_bound_sh = np.sqrt(6. / (num_cells + num_cells))
    self.W_gh = shared_uniform(W_bound_sh, (num_cells, num_cells), " W_gh")
    self.W_ih = shared_uniform(W_bound_sh, (num_cells, num_cells), " W_ih")
    self.W_fh = shared_uniform(W_bound_sh, (num_cells, num_cells), " W_fh")
    self.W_oh = shared_uniform(W_bound_sh, (num_cells, num_cells), " W_oh")

    self.b_g = random_weights((num_cells,), name=self.name + " b_g")
    self.b_i = random_weights((num_cells,), name=self.name + " b_i")
    self.b_f = random_weights((num_cells,), name=self.name + " b_f")
    self.b_o = random_weights((num_cells,), name=self.name + " b_o")

    self.params = [self.W_gx, self.W_ix, self.W_ox, self.W_fx,
                   self.W_gh, self.W_ih, self.W_oh, self.W_fh]
    self.bias = [self.b_g, self.b_i, self.b_f, self.b_o]
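# This variant adds go_backwards and return_sequences flags. A hedged
# sketch of how an output() method could honour them: step is the same
# hypothetical LSTM step function sketched earlier, and the names below
# are assumptions, not the original implementation.
import theano

def lstm_sequence_output_sketch(layer, step):
    [h, s], _ = theano.scan(step,
                            sequences=layer.X,
                            outputs_info=[layer.h0, layer.s0],
                            go_backwards=layer.go_backwards)
    # Return the whole hidden sequence, or only the final state
    return h if layer.return_sequences else h[-1]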
def __init__(self, num_input, num_cells, input_layers=None, name="",
             go_backwards=False):
    """
    GRU Layer
    Takes a sequence of inputs, returns a sequence of outputs
    """
    self.name = name
    self.num_input = num_input
    self.num_cells = num_cells

    if len(input_layers) >= 2:
        self.X = T.concatenate(
            [input_layer.output() for input_layer in input_layers], axis=1)
    else:
        self.X = input_layers[0].output()

    self.s0 = zeros(num_cells)
    self.go_backwards = go_backwards

    # Update (z), reset (r), and candidate (h) parameters
    self.U_z = random_weights((num_input, num_cells), name=self.name + "U_z")
    self.W_z = random_weights((num_cells, num_cells), name=self.name + "W_z")
    self.U_r = random_weights((num_input, num_cells), name=self.name + "U_r")
    self.W_r = random_weights((num_cells, num_cells), name=self.name + "W_r")
    self.U_h = random_weights((num_input, num_cells), name=self.name + "U_h")
    self.W_h = random_weights((num_cells, num_cells), name=self.name + "W_h")

    self.b_z = zeros(num_cells, name=self.name + "b_z")
    self.b_r = zeros(num_cells, name=self.name + "b_r")
    self.b_h = zeros(num_cells, name=self.name + "b_h")

    self.params = [self.U_z, self.W_z, self.U_r, self.W_r,
                   self.U_h, self.W_h, self.b_z, self.b_r, self.b_h]

    self.output()
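# A hedged sketch of the standard GRU recurrence these parameters imply
# (gru_output_sketch and step are hypothetical names; the original
# output() is not shown).
import theano
import theano.tensor as T

def gru_output_sketch(layer):
    def step(x_t, s_tm1):
        z = T.nnet.sigmoid(T.dot(x_t, layer.U_z) + T.dot(s_tm1, layer.W_z) + layer.b_z)
        r = T.nnet.sigmoid(T.dot(x_t, layer.U_r) + T.dot(s_tm1, layer.W_r) + layer.b_r)
        h = T.tanh(T.dot(x_t, layer.U_h) + T.dot(r * s_tm1, layer.W_h) + layer.b_h)
        return (1 - z) * h + z * s_tm1   # interpolate old and candidate state

    s, _ = theano.scan(step,
                       sequences=layer.X,
                       outputs_info=[layer.s0],
                       go_backwards=layer.go_backwards)
    return s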
def __init__(self, num_input, num_cells, input_layers=None, name="",
             go_backwards=False):
    """
    LSTM Layer
    Takes a sequence of inputs, returns a sequence of outputs
    """
    self.name = name
    self.num_input = num_input
    self.num_cells = num_cells

    if len(input_layers) >= 2:
        self.X = T.concatenate(
            [input_layer.output() for input_layer in input_layers], axis=1)
    else:
        self.X = input_layers[0].output()

    self.h0 = theano.shared(floatX(np.zeros(num_cells)))
    self.s0 = theano.shared(floatX(np.zeros(num_cells)))
    self.go_backwards = go_backwards

    self.W_gx = random_weights((num_input, num_cells), name=self.name + "W_gx")
    self.W_ix = random_weights((num_input, num_cells), name=self.name + "W_ix")
    self.W_fx = random_weights((num_input, num_cells), name=self.name + "W_fx")
    self.W_ox = random_weights((num_input, num_cells), name=self.name + "W_ox")
    self.W_gh = random_weights((num_cells, num_cells), name=self.name + "W_gh")
    self.W_ih = random_weights((num_cells, num_cells), name=self.name + "W_ih")
    self.W_fh = random_weights((num_cells, num_cells), name=self.name + "W_fh")
    self.W_oh = random_weights((num_cells, num_cells), name=self.name + "W_oh")

    self.b_g = zeros(num_cells, name=self.name + "b_g")
    self.b_i = zeros(num_cells, name=self.name + "b_i")
    self.b_f = zeros(num_cells, name=self.name + "b_f")
    self.b_o = zeros(num_cells, name=self.name + "b_o")

    self.params = [self.W_gx, self.W_ix, self.W_ox, self.W_fx,
                   self.W_gh, self.W_ih, self.W_oh, self.W_fh,
                   self.b_g, self.b_i, self.b_f, self.b_o]

    self.output()
def reset_state(self):
    self.W_yh = random_weights((self.num_input, self.num_output), name="W_yh")
    self.b_y = zeros(self.num_output, name="b_y")
    self.params = [self.W_yh, self.b_y]