Example #1
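This snippet assumes numpy plus two helpers defined elsewhere in the same module, randInit and ActivationFunction, which are not shown on this page. A minimal sketch of what they might look like (the uniform random init and the sigmoid-only wrapper are assumptions, not the original implementations):

import numpy as np

# Assumed helper (sketch): uniform random init in [-epsilon, epsilon].
def randInit(rows, cols, epsilon):
	return np.random.uniform(-epsilon, epsilon, (rows, cols))

# Assumed helper (sketch): minimal activation wrapper; only 'sigmoid' here.
class ActivationFunction(object):
	def __init__(self, activationType):
		if activationType != 'sigmoid':
			raise Exception('Only sigmoid is implemented in this sketch')
		self.activationType = activationType

	def forward(self, z):
		return 1./(1. + np.exp(-z))
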
class Layer(object):
	def __init__(self, inputSize, outputSize, activationType, bias, **kwargs):
		
		self.bias = bias
		self.inputSize = inputSize
		self.outputSize = outputSize
		
		## Was epsilonInit passed in? If not, default based on the layer's input size.
		self.epsilonInit = kwargs.get('epsilonInit', 1./(2*inputSize))

		# Create W on initialization (add a row for the bias term if enabled):
		if bias:
			self.W = randInit(self.inputSize+1, self.outputSize, self.epsilonInit)
		else:
			self.W = randInit(self.inputSize, self.outputSize, self.epsilonInit)
			
		# Create activation function object(s).
		# If the activation is a spline, we need to compute the total number of
		# params within this layer and create multiple spline instances.
		if activationType == 'spline':
			raise Exception('Spline param counting not implemented yet - SW')
			# Unreachable placeholder: create as many instances as outputs here.
			self.numParams = 0
			
		else:
			self.aF = ActivationFunction(activationType)
			self.numParams = self.W.size
	
	# Each layer has its own parameter setter and getter:
	# Pass in either a matrix the same shape as W, or a flat param vector of size (numParams,).
	def setParams(self, params):
		if params.shape == (self.numParams,):
			self.W = np.reshape(params, self.W.shape)
		elif params.shape == self.W.shape:
			self.W = params
		else:
			raise Exception('params must be of size (' + str(self.numParams) + ',) or ' + str(self.W.shape) + ' - SW')
			
	def getParams(self):
		return self.W.ravel()
	
	# Unravel derivatives with respect to weights
	# (self.dW is expected to be set elsewhere, e.g. by a backward pass not shown here):
	def getDW(self):
		return self.dW.ravel()
	
	# Pass inputs forward through the layer:
	def forward(self, **kwargs):
		# Takes an optional argument x; if no arg is given, reuse the previous x.
		if 'x' in kwargs:
			self.x = kwargs['x']
			# Only concatenate a column of ones with new inputs!
			if self.bias:
				self.x = np.hstack((np.ones((self.x.shape[0],1)), self.x))
		
		self.z = np.dot(self.x, self.W)
		self.a = self.aF.forward(self.z)
		return self.a
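
A quick usage sketch (the shapes and the 'sigmoid' activation type are illustrative assumptions):

if __name__ == '__main__':
	np.random.seed(0)
	layer = Layer(inputSize=3, outputSize=2, activationType='sigmoid', bias=True)
	x = np.random.randn(5, 3)      # batch of 5 examples, 3 features each
	a = layer.forward(x=x)         # activations, shape (5, 2)
	print(a.shape)

	# Round-trip the parameters through the flat-vector interface:
	params = layer.getParams()     # flat vector of size numParams == (3+1)*2
	layer.setParams(params)        # also accepts a matrix shaped like W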