def testMinSplit(self):
    """A higher min-split threshold should produce a smaller tree.

    Trains twice on the same data: once with setMinSplit(20) and once
    with setMinSplit(0), and checks the node count strictly shrinks
    under the stricter splitting requirement.
    """
    learner = DecisionTree()

    learner.setMinSplit(20)
    learner.learnModel(self.X, self.y)
    restrictedSize = orngTree.countNodes(learner.getClassifier())

    learner.setMinSplit(0)
    learner.learnModel(self.X, self.y)
    unrestrictedSize = orngTree.countNodes(learner.getClassifier())

    self.assertTrue(restrictedSize < unrestrictedSize)
def testSetMaxDepth(self):
    """Learned tree depth should not exceed the configured maximum."""
    # NOTE(review): this method is shadowed by an identically named
    # testSetMaxDepth defined immediately after it in this file, so the
    # test runner never executes this definition. Rename or delete one
    # of the duplicates so both (or the intended one) actually run.
    maxDepth = 20
    decisionTree = DecisionTree()
    decisionTree.setMaxDepth(maxDepth)
    decisionTree.learnModel(self.X, self.y)
    # maxDepth+1 presumably allows for the root level in
    # DecisionTree.depth's counting convention — TODO confirm.
    self.assertTrue(DecisionTree.depth(decisionTree.getClassifier().tree) <= maxDepth+1)
    # Repeat with a tighter limit to check the bound tracks the setting.
    maxDepth = 5
    decisionTree = DecisionTree()
    decisionTree.setMaxDepth(maxDepth)
    decisionTree.learnModel(self.X, self.y)
    self.assertTrue(DecisionTree.depth(decisionTree.getClassifier().tree) <= maxDepth+1)
def testSetMaxDepth(self):
    """Learned tree depth must respect the configured maximum.

    Exercises two different depth limits; the measured depth may exceed
    the limit by at most one (the original test allows maxDepth + 1).
    """
    for depthLimit in (20, 5):
        learner = DecisionTree()
        learner.setMaxDepth(depthLimit)
        learner.learnModel(self.X, self.y)
        measured = DecisionTree.depth(learner.getClassifier().tree)
        self.assertTrue(measured <= depthLimit + 1)
def testLearnModel(self):
    """Smoke test: learning on (X, y) yields a classifier.

    The original version assigned the classifier to an unused local and
    asserted nothing, so it could only fail by raising. Assert the
    returned classifier is not None so the test has a real check.
    """
    decisionTree = DecisionTree()
    decisionTree.learnModel(self.X, self.y)
    tree = decisionTree.getClassifier()
    self.assertTrue(tree is not None)