def _setInitEvaluable(self, evaluable):
    """Coerce the starting point into an array, register it with the base
    class, and reset the gradient helper and fitness bookkeeping.

    :param evaluable: initial parameter vector (any array-like accepted
        by ``array``).
    """
    start = array(evaluable)
    ContinuousOptimizer._setInitEvaluable(self, start)
    self.gradient.init(start)
    # Seed the trackers so the first evaluation registers as an update.
    # NOTE(review): sys.float_info.min is the smallest *positive* float,
    # not -inf — confirm that is the intended lower sentinel.
    self.currentFitness = sys.float_info.min
    self.prevFitness = sys.float_info.max
def _setInitEvaluable(self, evaluable):
    """Register the starting point and build a freshly configured
    gradient-descent helper seeded at that point.

    :param evaluable: initial parameter vector, forwarded to the base class.
    """
    ContinuousOptimizer._setInitEvaluable(self, evaluable)
    self.current = self._initEvaluable
    descent = self.gd = GradientDescent()
    descent.alpha = self.learningRate
    # None means "no decay configured"; leave the helper's default alone.
    if self.learningRateDecay is not None:
        descent.alphadecay = self.learningRateDecay
    descent.momentum = self.momentum
    descent.rprop = self.rprop
    descent.init(self._initEvaluable)
def _setInitEvaluable(self, evaluable):
    """Store the starting point via the base class, then construct and
    prime the internal :class:`GradientDescent` worker.

    :param evaluable: initial parameter vector, forwarded to the base class.
    """
    ContinuousOptimizer._setInitEvaluable(self, evaluable)
    self.current = self._initEvaluable
    worker = self.gd = GradientDescent()
    worker.alpha = self.learningRate
    # Only override the decay when one was explicitly supplied.
    if self.learningRateDecay is not None:
        worker.alphadecay = self.learningRateDecay
    worker.momentum = self.momentum
    worker.rprop = self.rprop
    worker.init(self._initEvaluable)
def _setInitEvaluable(self, evaluable):
    """Forward the initial point to the base class.

    Any caller-supplied starting point is ignored by this optimizer
    (per the warning it emits), so supplying one only triggers a log
    message.

    :param evaluable: optional initial point; a warning is logged if
        it is not ``None``.
    """
    caller_supplied_start = evaluable is not None
    if caller_supplied_start:
        logging.warning("Initial point provided was ignored.")
    ContinuousOptimizer._setInitEvaluable(self, evaluable)
def _setInitEvaluable(self, evaluable):
    """Delegate initial-point handling to the base class.

    This optimizer warns when a starting point is passed in, since
    (per its own message) that point is not used.

    :param evaluable: optional initial point; logging a warning when
        it is not ``None``.
    """
    if evaluable is not None:
        logging.warning("Initial point provided was ignored.")
    ContinuousOptimizer._setInitEvaluable(self, evaluable)