Example no. 1
import theano
from theano import sparse

# Debug: inspect the first layer's symbolic variables and the current weights.
new_W = old_W
# hebbianL = theano.function([old_W], new_W)
print(layer0.input)
print(layer0.output)
print(layer0.Wi)
print(old_W)
# wi = hebbianL([layer.params[0]])

# Build one (weights, new_weights) update pair per layer:
# W <- W + LR * (output . x_yw)^T, an Oja-style Hebbian step on sparse matrices.
for layer in layers:
	Wis.append((layer.params[0],
		sparse.add(
			layer.params[0],
			LR * sparse.transpose(
				sparse.structured_dot(layer.output, layer.x_yw)
				)
			)
		))

	# Expanded form of the same rule, kept for reference:
	# (layer.params[0],
	#  sparse.add(
	# 	layer.params[0],
	# 	LR * sparse.sub(
	# 		sparse.structured_dot(sparse.transpose(layer.output), layer.input),
	# 		sparse.structured_dot(
	# 			sparse.structured_dot(
	# 				sparse.transpose(layer.output),
	# 				layer.output),
	# 			layer.params[0]))))
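The (weights, new_weights) pairs collected in Wis are meant to be handed to theano.function as its updates argument. The fragment above relies on layer objects (layer.output, layer.x_yw) and globals (layers, LR, Wis) that it does not define, so here is a minimal, self-contained sketch of the same Oja-style rule on dense tensors; every name and shape in it is an illustrative assumption, not part of the original code.

import numpy as np
import theano
import theano.tensor as T

LR = 0.01
n_in, n_out = 8, 5
rng = np.random.RandomState(0)

x = T.matrix('x')                                     # (batch, n_in) input
W = theano.shared(rng.rand(n_out, n_in).astype(theano.config.floatX), name='W')

y = T.dot(x, W.T)                                     # (batch, n_out) layer output
# Oja's rule in matrix form: dW = LR * (y^T x - y^T y W)
dW = LR * (T.dot(y.T, x) - T.dot(T.dot(y.T, y), W))
hebbian_step = theano.function([x], y, updates=[(W, W + dW)])

hebbian_step(rng.rand(2, n_in).astype(theano.config.floatX))  # one unsupervised step

Each call to hebbian_step performs a forward pass and applies the weight update in place, which is the role the Wis list plays in the sparse version above.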
Example no. 2
	def getUpdateParams(self):
		update = []
		aux = []

		# Update state: the layer state follows the output of its input layer.
		update.append((self.params[0], input_layer.output))

		# Update output: sum the inhibition-gated contribution of every
		# incoming connection, then squash the total with a sigmoid.
		print('Length: ' + str(len(self.connections)))
		for i, c in enumerate(self.connections):
			aux.append(sparse.structured_dot(
				sparse.transpose(c.input),
				self.params[2][i] * c.inhibition
				))
		aux2 = aux.pop()
		while aux:
			aux2 = sparse.add(aux2, aux.pop())
			print(aux2)
		from theano import pp
		print('out: ')
		print(pp(aux2))
		update.append((self.params[1],
			sparse.transpose(sparse.structured_sigmoid(aux2))))

		# Earlier, hardcoded single-connection version kept for reference:
		# update.append((self.params[1],
		# 	sparse.transpose(
		# 		sparse.structured_sigmoid(sparse.structured_dot(
		# 			sparse.transpose(self.connections[0].input),	# Input
		# 			self.params[2][0])))))							# Weights

		# Update weights.
		# Old Oja-style rule kept for reference:
		# for i, w in enumerate(self.params[2]):
		# 	update.append((w,
		# 		sparse.add(
		# 			w,
		# 			self.LR[i] * sparse.transpose(
		# 				sparse.structured_dot(self.params[1], self.x_yw[i])
		# 				)
		# 			)
		# 		))

		# Current rule: W <- max(W + xy + AWW, 0), a rectified Hebbian step
		# that keeps the weights non-negative.
		for i, w in enumerate(self.params[2]):
			update.append((w,
				sparse.structured_maximum(
					sparse.add(
						w,
						sparse.add(self.xy[i], self.AWW[i])),
					0)
				))

		return update
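The final loop clamps the weights at zero with structured_maximum. Below is a minimal dense sketch of that rectified update, assuming xy is the Hebbian correlation term and AWW a decay term of the same shape as the weights; the snippet itself does not define either, so these meanings and all names here are assumptions for illustration only.

import numpy as np
import theano
import theano.tensor as T

rng = np.random.RandomState(0)
floatX = theano.config.floatX

W = theano.shared(rng.rand(4, 6).astype(floatX), name='W')
xy = T.matrix('xy')      # Hebbian correlation term, same shape as W
aww = T.matrix('aww')    # assumed decay / normalisation term, same shape as W

new_W = T.maximum(W + xy + aww, 0)   # dense analogue of structured_maximum(..., 0)
step = theano.function([xy, aww], new_W, updates=[(W, new_W)])

step(rng.rand(4, 6).astype(floatX),
     (-0.1 * np.ones((4, 6))).astype(floatX))

As in getUpdateParams, the (W, new_W) pair is passed to theano.function as an update, so each call moves the weights one rectified Hebbian step while keeping them non-negative.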