def _createNegativeOptimizer(self, startParams):
    """Build a GradientOptimizer for the negative paraboloid.

    Configured to run at most 200 learning steps and to *maximize*
    (``minimize = False``), since the negative paraboloid has a peak
    rather than a valley.
    """
    opt = GradientOptimizer(
        negativeParabaloidEvaluator,
        startParams,
        negativeDerivativesCalculator,
    )
    opt.maxLearningSteps = 200
    opt.minimize = False
    return opt
def _createPositiveOptimizer(self, startParams):
    """Build a GradientOptimizer for the positive paraboloid.

    Configured to run at most 200 learning steps and to *minimize*
    (``minimize = True``), since the positive paraboloid has a valley
    rather than a peak.
    """
    opt = GradientOptimizer(
        positiveParabaloidEvaluator,
        startParams,
        positiveDerivativesCalculator,
    )
    opt.maxLearningSteps = 200
    opt.minimize = True
    return opt