def create_parameters(self):
    n_in, n_hidden = self.n_in, self.n_hidden
    activation = self.activation
    self.w1 = create_shared(random_init((n_in,)), name="w1")
    self.w2 = create_shared(random_init((n_hidden,)), name="w2")
    bias_val = random_init((1,))[0]
    self.bias = theano.shared(np.cast[theano.config.floatX](bias_val))
    rlayer = RCNN((n_in + 1), n_hidden, activation=activation, order=2)
    self.rlayer = rlayer
    self.layers = [rlayer]
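# The methods in this file rely on two helpers, create_shared and random_init,
# whose definitions are not shown here. The sketch below is an assumption about
# what they do (draw small random initial values and wrap them in a floatX
# Theano shared variable); the original initialization scheme may differ.
import numpy as np
import theano

def random_init(shape, seed=None, scale=0.01):
    # Hypothetical initializer: small uniform values, cast to floatX.
    rng = np.random.RandomState(seed)
    return rng.uniform(low=-scale, high=scale, size=shape).astype(theano.config.floatX)

def create_shared(value, name=None):
    # Wrap a numpy array as a Theano shared variable in floatX precision.
    return theano.shared(np.asarray(value, dtype=theano.config.floatX), name=name)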
def __init__(self, n_in, n_out, activation=tanh,
             order=1, clip_gradients=False):
    self.n_in = n_in
    self.n_out = n_out
    self.activation = activation
    self.order = order
    self.clip_gradients = clip_gradients
    # Theano conv2d layout (batch, channels, rows, cols): one input channel,
    # n_in feature rows, and an unspecified sequence-length dimension.
    self.input_shape = (None, 1, n_in, None)
    # One filter per output unit; the temporal filter width is order*2 - 1.
    self.filter_shape = (n_out, 1, n_in, order * 2 - 1)
    self.W = create_shared(random_init(self.filter_shape), name="W")
    self.bias = create_shared(random_init((n_out,)), name="bias")
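# A quick shape check for the constructor above (illustrative values only):
# with order=2 the temporal filter width is order*2 - 1 = 3, so for n_in=200
# and n_out=100 the weight tensor W is laid out as
# (n_filters, n_channels, n_rows, n_cols) = (100, 1, 200, 3).
layer = RCNN(n_in=200, n_out=100, order=2)
assert layer.filter_shape == (100, 1, 200, 3)
assert layer.W.get_value().shape == (100, 1, 200, 3)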
def create_parameters(self):
    n_in, n_genclassess = self.n_in, self.n_genclassess
    activation = self.activation
    seed = self.seed
    print("in create parameters")
    self.w_s = create_shared(random_init((n_in, n_genclassess), seed=seed),
                             name="w1")
    # Only the first random value is kept, so bias_s is a single scalar rather
    # than a per-class bias vector.
    bias_val_s = random_init((n_genclassess,), seed=seed)[0]
    self.bias_s = theano.shared(np.cast[theano.config.floatX](bias_val_s))
    self.layers = []
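# Hypothetical use of w_s and bias_s (not part of the original code): a linear
# projection of an n_in-dimensional feature vector onto the n_genclassess
# classes, followed by a softmax. Because bias_s is a scalar, it shifts every
# class score by the same amount.
import theano.tensor as T

def predict_proba(self, x):
    # x: symbolic matrix of shape (batch, n_in)
    scores = T.dot(x, self.w_s) + self.bias_s
    return T.nnet.softmax(scores)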