def create_node(self, data_set, max_depth, feature_index_list, boost=False):
    """
    Recursively construct the decision tree (depth-first), choosing the split
    attribute at each node by information gain / entropy.

    :param data_set: training examples available at this node
    :param max_depth: remaining depth budget; decremented on each recursion
    :param feature_index_list: indices of features still available for splitting
    :param boost: when True, attribute selection uses the boosted (weighted)
        evaluation; propagated to every child node
    :return: the TreeNode rooting the subtree built from ``data_set``
    """
    is_termination_condition, class_label = self.test_termination_condition(
        data_set, max_depth, feature_index_list)

    tree_node = TreeNode()
    eval_util = EvalUtil()
    data_util = DataUtils()

    # Pick the best split attribute for this node and record it, together
    # with the class label (used when this node acts as a leaf).
    feature_split_index = eval_util.get_split_attribute_index(
        data_set, feature_index_list, boost)
    tree_node.set_split_feature_index(feature_split_index)
    split_feature_values = data_util.get_feature_discrete_values(
        data_set, feature_split_index)
    tree_node.set_class_label(class_label)

    # The chosen attribute is consumed; descendants may not split on it again.
    revised_index_list = [x for x in feature_index_list if x != feature_split_index]

    if not is_termination_condition:
        for value in split_feature_values:
            data_subset = data_util.get_data_subset(
                data_set, feature_split_index, value)
            if data_subset:
                # BUG FIX: propagate `boost` so weighted attribute selection
                # applies throughout the subtree, not only at the root (the
                # original recursion silently dropped the flag).
                child_node = self.create_node(
                    data_subset, max_depth - 1, revised_index_list, boost)
                tree_node.append_child(value, child_node)
            else:
                # No examples carry this attribute value: empty branch.
                tree_node.append_child(value, None)
    return tree_node