Example #1
    def testMinChange(self):
        optimizer = self._createPositiveOptimizer([5., 5.])
        # This is more than enough to optimize the function
        optimizer.maxLearningSteps = 10000
        optimizer.minChange = 1e-6
        result, f = optimizer.learn()

        assertListAlmostEqual(self, result, [0., 0.], 0.001)
        self.assertLess(optimizer.numLearningSteps, optimizer.maxLearningSteps)
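
The optimizer and the assertion helper these tests call are not shown anywhere in the examples. A minimal sketch of what they might look like, assuming plain gradient descent on the paraboloid f(x) = x1^2 + x2^2; the class name `_ParaboloidOptimizer`, the `learningRate` parameter, and the default step limit are all hypothetical, not taken from the source:

    def assertListAlmostEqual(testCase, actual, expected, delta):
        # Hypothetical helper: element-wise comparison of two equal-length
        # sequences within +/- delta.
        testCase.assertEqual(len(actual), len(expected))
        for a, e in zip(actual, expected):
            testCase.assertAlmostEqual(a, e, delta=delta)

    class _ParaboloidOptimizer:
        # Hypothetical sketch: gradient descent on f(x) = sum(x_i ** 2); with
        # maximize=True the reported objective is negated, i.e. the run is
        # read as maximizing -f.
        def __init__(self, start, maximize=False, learningRate=0.1):
            self.point = list(start)
            self.sign = -1.0 if maximize else 1.0
            self.learningRate = learningRate
            self.maxLearningSteps = 1000
            self.minChange = 0.0
            self.numLearningSteps = 0

        def _objective(self):
            return self.sign * sum(x * x for x in self.point)

        def learn(self):
            previous = self._objective()
            while self.numLearningSteps < self.maxLearningSteps:
                # The gradient of x**2 is 2 * x, so every coordinate decays
                # geometrically toward the optimum at 0.
                self.point = [x - self.learningRate * 2.0 * x for x in self.point]
                self.numLearningSteps += 1
                current = self._objective()
                # Stop early once the objective barely moves between steps;
                # this is the behaviour testMinChange exercises.
                if abs(current - previous) < self.minChange:
                    break
                previous = current
            return self.point, self._objective()

With the 0.1 learning rate assumed above, the minChange run converges in roughly 40 steps, well under its 10000-step budget, which is what the assertLess check verifies.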
Example #2

    def testDistribution(self):
        inputs = [1, 2]
        thetas = [0.5, 0.4, 0.3]
        classifier = _LogisticRegression(thetas)

        distribution = classifier.getDistribution(inputs)
        posProbability = sigmoid(0.5 * 1 + 0.4 * 1 + 0.3 * 2)
        expectedDistribution = [1 - posProbability, posProbability]

        assertListAlmostEqual(self, distribution, expectedDistribution, 0.0001)
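
Here `sigmoid` and `_LogisticRegression` are likewise assumed rather than shown. A sketch consistent with the expected value computed in the test, where thetas[0] acts as the bias and the remaining weights pair with the input features (the constructor and method shapes are assumptions):

    import math

    def sigmoid(z):
        # Logistic function: maps any real-valued score into (0, 1).
        return 1.0 / (1.0 + math.exp(-z))

    class _LogisticRegression:
        # Hypothetical sketch of a binary classifier: thetas[0] is the bias,
        # thetas[1:] are the per-feature weights.
        def __init__(self, thetas):
            self.thetas = thetas

        def getDistribution(self, inputs):
            score = self.thetas[0] + sum(
                theta * x for theta, x in zip(self.thetas[1:], inputs))
            posProbability = sigmoid(score)
            # Probability mass over the two classes: [negative, positive].
            return [1.0 - posProbability, posProbability]

For inputs = [1, 2] and thetas = [0.5, 0.4, 0.3], this yields sigmoid(0.5 + 0.4 + 0.6), matching the expected distribution asserted above.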
Example #3
    def testParaboloidOptimizationMaximize3(self):
        optimizer = self._createNegativeOptimizer([-5., 5.])
        result, f = optimizer.learn()

        assertListAlmostEqual(self, result, [0., 0.], 0.001)
Example #4
    def testParaboloidOptimizationMinimize2(self):
        optimizer = self._createPositiveOptimizer([5., -5.])
        result, f = optimizer.learn()

        assertListAlmostEqual(self, result, [0., 0.], 0.001)
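
The `_createPositiveOptimizer` / `_createNegativeOptimizer` fixtures are also left undefined in the examples. Building on the `_ParaboloidOptimizer` sketch after Example #1, they might look like this (the test-class name is hypothetical):

    import unittest

    class ParaboloidOptimizationTest(unittest.TestCase):
        # Hypothetical fixtures: the "positive" optimizer minimizes the upward
        # paraboloid f(x) = x1^2 + x2^2, the "negative" one maximizes the
        # downward paraboloid -f(x). Both optima sit at [0, 0], which is why
        # every test expects a result of [0., 0.] regardless of the start point.
        def _createPositiveOptimizer(self, start):
            return _ParaboloidOptimizer(start, maximize=False)

        def _createNegativeOptimizer(self, start):
            return _ParaboloidOptimizer(start, maximize=True)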