Example #1
0
class Optimizer(object):
	"""
	Base class for unconstrained optimization algorithms. 
	
	*function* is the cost function (a Python function or any other 
	callable object) that is subject to minimization. 
	
	Setting *debug* to a value greater than 0 turns on debug messages 
	at standard output. 
	
	*fstop* is the cost function value at which the algorithm stops. 
	``None`` means the algorithm keeps running regardless of how low 
	the cost function's value gets. 
	
	*maxiter* is the number of cost function evaluations after which 
	the algorithm stops. ``None`` removes the limit on the number of 
	cost function evaluations. 
	
	When *nanBarrier* is ``True`` NaN function values are treated as 
	infinite, resulting in an extreme barrier. 
	
	*cache* turns on local point caching. Currently works only for 
	algorithms that do not use remote evaluations. See the 
	:mod:`~pyopus.optimizer.cache` module for more information.
	
	Every object of the :class:`Optimizer` class provides the 
	following members
	
	* :attr:`ndim` - dimension of the problem. Updated when the :meth:`reset` 
	  method is called. 
	* :attr:`niter` - the consecutive number of cost function evaluation.
	* :attr:`x` - the argument to the cost function resulting in the best-yet 
	  (lowest) value.
	* :attr:`f` - the best-yet value of the cost function. 
	* :attr:`bestIter` - the iteration in which the best-yet value of the cost 
	  function was found. 
	* :attr:`bestAnnotations` - a list of annotations produced by the installed 
	  annotators for the best-yet value of the cost function.
	* :attr:`stop` - boolean flag indicating that the algorithm should stop. 
	* :attr:`annotations` - a list of annotations produced by the installed 
	  annotators for the last evaluated cost function value
	* :attr:`annGrp` - :class:`AnnotatorGroup` object that holds the installed 
	  annotators
	* :attr:`plugins` - a list of installed plugin objects
	
	Plugin objects are called at every cost function evaluation or whenever a 
	remotely evaluated cost function value is registered by the 
	:meth:`newResult` method. 
	
	Values of *x* and related members are arrays. 
	"""
	def __init__(self, function, debug=0, fstop=None, maxiter=None, nanBarrier=False, cache=False):
		# The cost function; must be picklable if a parallel optimization 
		# method is going to use it. 
		self.function=function
		
		# Verbosity level; >0 enables debug output. 
		self.debug=debug
		
		# Problem dimension, set by reset(). 
		self.ndim=None
		
		# Stopping conditions. 
		self.fstop=fstop
		self.maxiter=maxiter
		
		# Extreme barrier for NaN function values. 
		self.nanBarrier=nanBarrier
		
		# Local point cache (None when caching is disabled). 
		self.cache=Cache() if cache else None
		
		# Cost function evaluation counter. 
		self.niter=0
		
		# Best-yet point, its cost function value, the iteration at which 
		# it was found, and the corresponding annotations. 
		self.x=None
		self.f=None
		self.bestIter=None
		self.bestAnnotations=None
		
		# Installed plugin objects. 
		self.plugins=[]
		
		# Group of installed annotators. 
		self.annGrp=AnnotatorGroup()
		
		# Flag signalling the algorithm to stop. 
		self.stop=False
		
		# Annotations produced at the last cost function evaluation. 
		self.annotations=None
		
	def check(self):
		"""
		Checks the optimization algorithm's settings and raises an exception 
		if something is wrong. 
		"""
		if (self.fun is None):
			raise Exception, DbgMsg("OPT", "Cost function not defined.")
		
		if self.maxiter is not None and self.maxiter<1:
			raise Exception, DbgMsg("OPT", "Maximum number of iterations must be at least 1.")

	def installPlugin(self, plugin):
		"""
		Installs a plugin object or an annotator in the plugins list 
		and/or the annotators list. 
		
		Returns a tuple of two indices. The first one is the index of 
		the object among the installed annotators while the second one 
		is its index among the installed plugins. 
		
		If the object is not an annotator the first index is ``None``. 
		Likewise, if the object is not a plugin the second index is 
		``None``. 
		"""
		annotatorIndex=None
		pluginIndex=None
		
		if issubclass(type(plugin), Annotator):
			annotatorIndex=self.annGrp.add(plugin)
		
		if issubclass(type(plugin), Plugin):
			self.plugins.append(plugin)
			pluginIndex=len(self.plugins)-1
		
		return (annotatorIndex, pluginIndex)
		
	def getEvaluator(self, x):
		"""
		Returns a tuple of the form (function, args) where *function* 
		is the evaluator and *args* is the list of its positional 
		arguments that evaluate the problem at *x*. The tuple may be 
		shipped to a remote computing node where the evaluation is 
		invoked with:: 
		
		  # Tuple t holds the function and its arguments
		  func,args=t
		  retval=func(*args)
		  # Send retval back to the master
		
		The point to be evaluated is the first positional argument. 
		
		The evaluator returns a tuple of the form (f,annotations).
		"""
		args=[x, self.function, self.annGrp, self.nanBarrier]
		return UCEvaluator, args
	
	def fun(self, x, count=True):
		"""
		Evaluates the cost function at *x* (array). If *count* is ``True`` 
		the :meth:`newResult` method is invoked with *x*, the obtained cost 
		function value, and the annotations produced by the evaluator. This 
		registers the result (updates the best-yet point information), calls 
		the plugins, and consumes the annotations. 
		
		Set *count* to ``False`` when the cost function is evaluated for 
		debugging purposes only. 
		
		Returns the value of the cost function at *x*. 
		"""
		# Try the cache first, if one is installed. 
		cachedData=None
		if self.cache is not None:
			cachedData=self.cache.lookup(x)
		
		if cachedData is not None:
			# Cache hit; reuse the stored value and annotations. 
			f,annot,it=cachedData
		else:
			# Cache miss or no cache; evaluate the function locally. 
			evaluator,evaluatorArgs=self.getEvaluator(x)
			f,annot=evaluator(*evaluatorArgs)
		
		# Register the result. Passing the annotations tells newResult() 
		# that the function evaluation actually happened in this process. 
		if count:
			self.newResult(x, f, annot)
		
		return np.array(f)
	
	def updateBest(self, x, f):
		"""
		Updates best yet function value. 
		Returns ``True`` if an update takes place. 
		"""
		if (self.f is None) or (self.f>f):
			self.f=f
			self.x=x
			self.bestIter=self.niter
			return True
		
		return False
		
	def newResult(self, x, f, annotations=None):
		"""
		Registers the cost function value *f* obtained at point *x* with 
		annotations list given by *annotations*. 
		
		Increases the :attr:`niter` member to reflect the iteration number of 
		the point being registered and updates the :attr:`f`, :attr:`x`, and 
		:attr:`bestIter` members. 
		
		If the *annotations* argument is given, it must be a list with as many 
		members as there are annotator objects installed in the optimizer. The 
		annotations list is stored in the :attr:`annotations` member. If *f* 
		improves the best-yet value annotations are also stored in the 
		:attr:`bestAnnotations` member. The *annotations* are consumed by calling 
		the :meth:`consume` method of the annotators. 
		
		Finally it is checked if the best-yet value of cost function is below 
		:attr:`fstop` or the number of iterations exceeded :attr:`maxiter`. If 
		any of these two conditions is satisfied, the algorithm is stopped by 
		setting the :attr:`stop` member to ``True``. 
		"""
		# Increase evaluation counter
		self.niter+=1
		
		# Store in cache
		if self.cache and self.cache.lookup(x) is None:
			self.cache.insert(x, (f, annotations, self.niter))
		
		# Update best-yet
		updated=self.updateBest(x, f)
		
		# If no annotation are given, function evaluation happened in this process
		if annotations is not None:
			# Put annotations in annotations list
			self.annotations=annotations
			# Consume annotations
			self.annGrp.consume(annotations)
		
		# Update best-yet annotations
		if updated:
			self.bestAnnotations=self.annotations
		
		# Annotations are set up. Call plugins. 
		nplugins=len(self.plugins)
		for index in range(0,nplugins):
			plugin=self.plugins[index]
			if plugin is not None:
				stopBefore=self.stop
				plugin(x, f, self)
				if self.debug and self.stop and not stopBefore: 
					DbgMsgOut("OPT", "Run stopped by plugin object.")
		
		# Force stop condition on f<=fstop
		if (self.fstop is not None) and (self.f<=self.fstop):
			self.stop=True
			
			if self.debug:
				DbgMsgOut("OPT", "Function fell below desired value. Stopping.")
		
		# Force stop condition on niter>maxiter
		if self.maxiter is not None and self.niter>=self.maxiter:
			self.stop=True
			
			if self.debug:
				DbgMsgOut("OPT", "Maximal number of iterations exceeded. Stopping.")
	
	def reset(self, x0):
		"""
		Puts the optimizer in its initial state and sets the initial point to 
		be the 1-dimensional array or list *x0*. The length of the array 
		becomes the dimension of the optimization problem 
		(:attr:`ndim` member). 
		"""
		# Debug message
		if self.debug:
			DbgMsgOut("OPT", "Resetting.")
			
		# Determine dimension of the problem from initial point
		x0=array(x0)
		self.ndim=x0.shape[0]
		
		if x0.ndim!=1:
			raise Exception, DbgMsg("OPT", "Initial point must be a vector.")
			
		# Store initial point
		self.x=x0.copy()
		self.f=None
		
		# Reset iteration counter
		self.niter=0
		
		# Reset plugins
		for plugin in self.plugins:
			if plugin is not None:
				plugin.reset()
			
	def run(self):
		"""
		Runs the optimization algorithm. 
		"""
		# Does nothing, reimplement this in a derived class. 
		pass