예제 #1
0
	def outputs(self, inputs):
		"""Apply the layer's affine transform followed by its activation.

		Parameters
		----------
		inputs : theano tensor
			Input batch; multiplied by ``self.w`` and offset by ``self.b``.

		Returns
		-------
		theano tensor
			The activated layer outputs.

		Raises
		------
		NotImplementedError
			If ``self.activation`` is not one of the supported names.
		"""
		# The affine pre-activation is identical for every branch; compute once.
		pre_activation = T.dot(inputs, self.w) + self.b
		if self.activation == 'sigmoid':
			result = T.nnet.sigmoid(pre_activation)
		elif self.activation == 'softmax':
			result = T.nnet.softmax(pre_activation)
		elif self.activation == 'liner':
			# NOTE(review): the key 'liner' looks like a misspelling of
			# 'linear' (and the branch is actually a ReLU, max(x, 0));
			# the string is kept as-is so existing callers keep working.
			# Bug fix: T.maximun -> T.maximum (typo; AttributeError).
			result = T.maximum(pre_activation, 0)
		else:
			# Bug fix: previously an unknown activation fell through and
			# crashed with UnboundLocalError on the return; fail explicitly,
			# matching the RNN step() implementation elsewhere in the project.
			raise NotImplementedError
		return result
예제 #2
0
파일: myrnn.py 프로젝트: ubuntu733/DeepNet
 def step(u_t, h_tm1):
     """One recurrence step of the RNN (scanned over the input sequence).

     Parameters
     ----------
     u_t : theano tensor
         Input at the current timestep.
     h_tm1 : theano tensor
         Hidden state from the previous timestep.

     Returns
     -------
     tuple of theano tensors
         ``(h_t, y_t)`` — the new hidden state and the output projection
         ``h_t . w_ho + b_o`` at this timestep.

     Raises
     ------
     NotImplementedError
         If ``self.activation`` is not 'sigmoid', 'tanh' or 'relu'.
     """
     # The recurrent pre-activation is identical for every nonlinearity;
     # compute it once instead of once per branch.
     pre_h = T.dot(u_t, self.w_ih) + T.dot(h_tm1, self.w_hh) + self.b_h
     if self.activation == 'sigmoid':
         h_t = T.nnet.sigmoid(pre_h)
     elif self.activation == 'tanh':
         h_t = T.tanh(pre_h)
     elif self.activation == 'relu':
         # Bug fix: T.maximun -> T.maximum (typo; AttributeError).
         h_t = T.maximum(pre_h, 0)
     else:
         raise NotImplementedError
     # The output projection was duplicated in all three branches; it is the
     # same expression everywhere, so hoist it below the branch (unreachable
     # for unknown activations, same as before the refactor).
     y_t = T.dot(h_t, self.w_ho) + self.b_o
     return h_t, y_t