Example #1
0
 def _setInitEvaluable(self, evaluable):
     """Wrap the starting point in an array, register it with the
     superclass, and reset the gradient module and fitness bookkeeping."""
     startPoint = array(evaluable)
     ContinuousOptimizer._setInitEvaluable(self, startPoint)

     # Restart the gradient module from the new starting point and widen
     # the fitness window so the very first step never looks converged.
     self.gradient.init(startPoint)
     self.prevFitness = sys.float_info.max
     self.currentFitness = sys.float_info.min
Example #2
0
 def _setInitEvaluable(self, evaluable):
     """Register the starting point with the superclass and build the
     underlying GradientDescent module from this optimizer's settings."""
     ContinuousOptimizer._setInitEvaluable(self, evaluable)
     self.current = self._initEvaluable

     # Mirror the optimizer's configuration onto a fresh descent module,
     # then publish it once it is fully initialized.
     descent = GradientDescent()
     descent.alpha = self.learningRate
     if self.learningRateDecay is not None:
         descent.alphadecay = self.learningRateDecay
     descent.momentum = self.momentum
     descent.rprop = self.rprop
     descent.init(self._initEvaluable)
     self.gd = descent
Example #3
0
 def _setInitEvaluable(self, evaluable):
     """Hand the starting point to the superclass, then configure a new
     GradientDescent helper from the optimizer's learning parameters."""
     ContinuousOptimizer._setInitEvaluable(self, evaluable)
     self.current = self._initEvaluable
     self.gd = GradientDescent()

     # Copy the per-optimizer hyperparameters onto the descent module.
     self.gd.alpha = self.learningRate
     decay = self.learningRateDecay
     if decay is not None:
         self.gd.alphadecay = decay
     self.gd.momentum = self.momentum
     self.gd.rprop = self.rprop

     # Seed the descent state at the registered starting point.
     self.gd.init(self._initEvaluable)
Example #4
0
 def _stoppingCriterion(self):
     """Stop when the superclass criterion fires or when the last step
     improved the fitness by less than ``minChange``."""
     # The generic criterion is evaluated first; the progress test only
     # matters when it does not already ask us to stop.
     stalled = abs(self.prevFitness - self.currentFitness) < self.minChange
     shouldStop = ContinuousOptimizer._stoppingCriterion(self) or stalled

     # Remember this step's fitness for the next progress comparison.
     self.prevFitness = self.currentFitness

     return shouldStop
Example #5
0
 def __init__(self, evaluator=None, initEvaluable=None, gradientsCalculator=None, gradient=None, minChange=1e-6, minimize=True):
     """
     evaluator - the same as in superclass
     initEvaluable - the same as in superclass
     gradientsCalculator - function that calculates partial derivatives for each variable for specified coordinates.
     It should accept the only parameter - list of current values of parameters and should return an array of partial derivatives values.
     Value of partial derivative for parameter X should have the same position in the result array as parameter X has in parameters array.
     gradient (GradientDescent) - object that changes values of parameters using current gradient;
     when omitted, a fresh GradientDescent instance is created for this optimizer.
     minChange - minimal fitness change between two consecutive steps; a smaller change triggers the stopping criterion.
     minimize - True to minimize the evaluator's value, False to maximize it.
     """

     self.gradientsCalculator = gradientsCalculator
     # BUGFIX: the default used to be `gradient=GradientDescent()`, a single
     # instance created at function-definition time and silently shared by
     # every optimizer constructed without an explicit `gradient`, so their
     # descent state (alpha, momentum buffers, init point) leaked across
     # instances. Build a fresh module per optimizer instead.
     self.gradient = GradientDescent() if gradient is None else gradient
     self.minChange = minChange

     # Sentinels guaranteeing the first step never looks converged.
     self.prevFitness = sys.float_info.max
     self.currentFitness = sys.float_info.min

     initEvaluable = array(initEvaluable)
     ContinuousOptimizer.__init__(self, evaluator, initEvaluable)
     self.minimize = minimize
Example #6
0
 def _setInitEvaluable(self, evaluable):
     """Delegate to the superclass, warning the caller that a supplied
     initial point is not used by this optimizer."""
     wasProvided = evaluable is not None
     if wasProvided:
         logging.warning("Initial point provided was ignored.")
     ContinuousOptimizer._setInitEvaluable(self, evaluable)
Example #7
0
 def _setInitEvaluable(self, evaluable):
     """Forward the initial point to the superclass. This optimizer does
     not act on caller-provided starting points, so emit a warning when
     one arrives."""
     if evaluable is not None:
         # The value is still forwarded below so superclass bookkeeping
         # stays consistent; it simply has no effect here.
         logging.warning("Initial point provided was ignored.")
     ContinuousOptimizer._setInitEvaluable(self, evaluable)