def init_params(self):
    options = self.options
    insize = options.get('insize')
    outsize = options.get('outsize')
    init = options.get('init')

    W = options.get('W')
    if W is None:
        W = init((insize, outsize))
    W = theano.shared(
        value=W,
        name='W',
        borrow=True
    )

    b = options.get('b')
    if b is None:
        b = np.zeros(outsize, dtype=theano.config.floatX)
    b = theano.shared(
        value=b,
        name='b',
        borrow=True
    )

    return [W, b]

def init_params(self):
    options = self.options
    in_dim = options.get('insize')
    out_dim = options.get('outsize')
    init = options.get('init')

    W_softmax = options.get('W_softmax')
    if W_softmax is None:
        W_softmax = init((in_dim, out_dim))
    W_softmax = theano.shared(
        value=W_softmax,
        name='W_softmax',
        borrow=True
    )

    b_softmax = options.get('b_softmax')
    if b_softmax is None:
        b_softmax = np.zeros(out_dim, dtype=theano.config.floatX)
    b_softmax = theano.shared(
        value=b_softmax,
        name='b_softmax',
        borrow=True
    )

    return [W_softmax, b_softmax]

def init_params(self): insize = self.options.get("insize") outsize = self.options.get("outsize") W = self.options.get("W") b = self.options.get("b") W_h = self.options.get("W_h") init = self.options.get("init") if W is None: W = init((insize, outsize)) if b is None: b = np.zeros(shape=(outsize,), dtype=theano.config.floatX) if W_h is None: W_h = init((outsize, outsize)) W = theano.shared(value=W, borrow=True, name="W") b = theano.shared(value=b, borrow=True, name="b") W_h = theano.shared(value=W_h, borrow=True, name="W_h") return [W, b, W_h]
def init_params(self):
    insize = self.options.get('insize')
    outsize = self.options.get('outsize')
    W = self.options.get('W')
    b = self.options.get('b')
    W_h = self.options.get('W_h')
    init = self.options.get('init')

    if W is None:
        W = init((insize, outsize))
    if b is None:
        b = np.zeros(shape=(outsize,), dtype=theano.config.floatX)
    if W_h is None:
        W_h = init((outsize, outsize))

    W = theano.shared(value=W, borrow=True, name='W')
    b = theano.shared(value=b, borrow=True, name='b')
    W_h = theano.shared(value=W_h, borrow=True, name='W_h')

    return [W, b, W_h]

def init_params(self): options = self.options insize = options.get("insize") outsize = options.get("outsize") init = options.get("init") W = options.get("W") if W is None: W = init((insize, outsize)) W = theano.shared(value=W, name="W", borrow=True) b = options.get("b") if b is None: b = np.zeros(outsize, dtype=theano.config.floatX) b = theano.shared(value=b, name="b", borrow=True) return [W, b]
def make_shared_matrix(p, ins, outs, init):
    # Use the supplied initializer when no explicit value is given.
    # `init` is taken as a parameter here, since it is read from the
    # layer options in the methods above rather than defined globally.
    if p is None:
        p = init((ins, outs))
    return theano.shared(value=p, borrow=True)
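
# A minimal usage sketch of the helper above. The DenseLayer class, the
# uniform_init initializer, and the option values are illustrative
# assumptions for this sketch, not part of the original code.
import numpy as np
import theano


def uniform_init(shape, scale=0.01):
    # Hypothetical stand-in for the `init` callable stored in options.
    return np.asarray(
        np.random.uniform(low=-scale, high=scale, size=shape),
        dtype=theano.config.floatX,
    )


class DenseLayer(object):
    def __init__(self, options):
        self.options = options

    def init_params(self):
        options = self.options
        insize = options.get('insize')
        outsize = options.get('outsize')
        init = options.get('init')

        # The weight matrix goes through the shared helper defined above.
        W = make_shared_matrix(options.get('W'), insize, outsize, init)

        # The bias is a vector, so it is created directly here.
        b = options.get('b')
        if b is None:
            b = np.zeros(outsize, dtype=theano.config.floatX)
        b = theano.shared(value=b, name='b', borrow=True)

        return [W, b]


# Example: build a 128-to-10 layer and inspect the shared parameters.
layer = DenseLayer({'insize': 128, 'outsize': 10, 'init': uniform_init})
W, b = layer.init_params()
print(W.get_value().shape, b.get_value().shape)  # (128, 10) (10,)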