def initialize(self, weight_type="none"):
    """Initialize weights.

    Parameters
    ----------
    weight_type : string
        type of weights: "none", "tanh", "sigmoid"
    """
    if self.W_h is None:
        self.W_h = util.init_weights("W_h", self.out_dim, self.in_dim,
                                     weight_type=weight_type)
    if self.W_t is None:
        self.W_t = util.init_weights("W_t", self.out_dim, self.in_dim,
                                     weight_type=weight_type)
    if self.bias_h is None:
        self.bias_h = util.init_weights("bias_h", self.out_dim,
                                        weight_type=weight_type)
    if self.bias_t is None:
        self.bias_t = util.shared_floatx_ones((self.out_dim,),
                                              value=self.gate_bias,
                                              name="bias_t")
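# The weights above parameterize a highway layer: W_h/bias_h produce the
# transformed activation, W_t/bias_t produce the transform gate, and bias_t
# starts at self.gate_bias (typically negative, so the gate initially favors
# the carry path). A minimal sketch of the corresponding forward pass,
# assuming batch-major Theano inputs and a hypothetical `apply` method name
# (the actual forward code is not part of this snippet); note the carry term
# requires in_dim == out_dim:
import theano.tensor as T

def apply(self, x):
    h = T.tanh(T.dot(x, self.W_h.T) + self.bias_h)           # candidate activation
    t = T.nnet.sigmoid(T.dot(x, self.W_t.T) + self.bias_t)   # transform gate
    return h * t + x * (1. - t)                              # gated mix with carry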
def initialize(self, weight_type="none"):
    """Initialize weights and bias.

    Parameters
    ----------
    weight_type : string
        type of weights: "none", "tanh", "sigmoid"
    """
    if self.W is None:
        self.W = util.init_weights("W", self.out_dim, self.in_dim,
                                   weight_type=weight_type)
    if self.use_bias and self.bias is None:
        self.bias = util.init_weights("bias", self.out_dim,
                                      weight_type=weight_type)
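# util.init_weights itself is not shown in this file; the sketch below is an
# assumption of what it might look like, based on the "none"/"tanh"/"sigmoid"
# weight_type options and the Glorot-style bound used for the conv filters
# further down. Treat the scaling factors as hypothetical:
import numpy as np
import theano

def init_weights(name, out_dim, in_dim=None, weight_type="none"):
    if in_dim is None:
        value = np.zeros((out_dim,))              # bias vector, zero-initialized
    else:
        bound = np.sqrt(6. / (in_dim + out_dim))  # Glorot uniform bound
        if weight_type == "sigmoid":
            bound *= 4.                           # common sigmoid rescaling
        value = np.random.uniform(-bound, bound, (out_dim, in_dim))
    return theano.shared(value.astype("float32"), name=name, borrow=True)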
def initialize(self, weight_type="none"):
    """Initialize weights for RNN."""
    Layer.initialize(self, weight_type)
    self.hidden = util.init_weights("Hidden", self.out_dim,
                                    weight_type=weight_type)
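# A minimal sketch of the recurrent step these weights support, assuming
# self.W/self.bias come from Layer.initialize, self.hidden is a square
# (out_dim, out_dim) recurrent matrix, and `step` is a hypothetical method
# name (the scan loop is not shown in this snippet):
import theano.tensor as T

def step(self, x_t, h_prev):
    # input projection plus recurrent projection, squashed by tanh
    return T.tanh(T.dot(x_t, self.W.T) + T.dot(h_prev, self.hidden) + self.bias)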
def init_weights(self):
    """Initialize LSTM weights."""
    # stacked projection for the four gates, hence out_dim * 4 rows
    self.W_state = util.init_weights("W_state", self.out_dim * 4,
                                     self.in_dim, weight_type="sigmoid")
    # peephole connections (allocated as NaNs; presumably filled elsewhere)
    self.W_cell_to_in = util.shared_floatx_nans((self.out_dim,),
                                                name="W cell to in")
    self.W_cell_to_forget = util.shared_floatx_nans((self.out_dim,),
                                                    name="W cell to forget")
    self.W_cell_to_out = util.shared_floatx_nans((self.out_dim,),
                                                 name="W cell to out")
    # learned initial hidden and cell states
    self.init_state = util.shared_floatx_zeros((self.out_dim,),
                                               name="initial states")
    self.init_cell = util.shared_floatx_zeros((self.out_dim,),
                                              name="initial cell")
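# W_state is allocated with out_dim * 4 rows because it stacks the four LSTM
# gate projections (input, forget, cell candidate, output). A minimal step
# sketch under that reading -- the gate ordering and the handling of the
# recurrent input are assumptions, not confirmed by this snippet:
import theano.tensor as T

def step(self, x_t, h_prev, c_prev):
    z = T.dot(x_t, self.W_state.T)                  # (batch, 4 * out_dim)
    def gate(i):                                    # slice out gate block i
        return z[:, i * self.out_dim:(i + 1) * self.out_dim]
    i_t = T.nnet.sigmoid(gate(0) + c_prev * self.W_cell_to_in)      # input gate
    f_t = T.nnet.sigmoid(gate(1) + c_prev * self.W_cell_to_forget)  # forget gate
    c_t = f_t * c_prev + i_t * T.tanh(gate(2))                      # new cell state
    o_t = T.nnet.sigmoid(gate(3) + c_t * self.W_cell_to_out)        # output gate
    return o_t * T.tanh(c_t), c_t                                   # new hidden, cell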
def initialize(self, weight_type="none"):
    """Initialize weights and bias.

    Parameters
    ----------
    weight_type : string
        type of weights: "none", "tanh", "sigmoid"
    """
    # TODO: a better initialization scheme for convnet weights
    fan_in = self.num_channels * np.prod(self.filter_size)
    fan_out = self.num_filters * np.prod(self.filter_size)
    filter_bound = np.sqrt(6. / (fan_in + fan_out))  # Glorot uniform bound
    filter_shape = ((self.num_filters, self.num_channels)
                    + tuple(self.filter_size))
    self.filters = theano.shared(
        np.asarray(np.random.uniform(low=-filter_bound,
                                     high=filter_bound,
                                     size=filter_shape),
                   dtype="float32"),
        borrow=True)
    if self.use_bias:
        self.bias = util.init_weights("bias", self.num_filters,
                                      weight_type=weight_type)
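# A minimal sketch of applying these filters, assuming bc01 input
# (batch, channels, rows, cols) and a hypothetical `apply` method name;
# the actual convolution code is not part of this snippet:
import theano.tensor as T

def apply(self, x):
    out = T.nnet.conv2d(x, self.filters)   # valid-mode convolution
    if self.use_bias:
        # broadcast the per-filter bias over batch and spatial dimensions
        out = out + self.bias.dimshuffle("x", 0, "x", "x")
    return out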