コード例 #1
0
def decision_tree_learn(examples, attributes, parent_examples):
    """Recursively grow a decision tree over categorical attributes.

    Returns a leaf (probability node or class label) for the base cases,
    otherwise a tree node split on the most informative attribute with
    one subtree per attribute value.
    """
    # No examples left: fall back to the parent's class distribution.
    if not examples:
        return dt_util.get_probability_node(parent_examples)

    # All examples agree on a class: that class is the leaf.
    shared = dt_util.same_class(examples)
    if shared:
        return shared

    # No attributes left to split on: summarize the remaining examples.
    if not attributes:
        return dt_util.get_probability_node(examples)

    # Pick the most informative attribute; it also yields the tree node.
    best, tree = get_best_attribute(attributes, examples)
    if best is None:
        return dt_util.get_probability_node(examples)

    # The chosen attribute is consumed; descendants split on the rest.
    remaining = list(attributes)
    remaining.remove(best)
    for value, subset in group_by_attribute(best, examples):
        tree.add_subtree(value, decision_tree_learn(subset, remaining, examples))

    return tree
コード例 #2
0
def decision_tree_learn(examples, attributes, parent_examples):
    """Learn a decision tree by recursive partitioning.

    Base cases produce leaves: an empty example set defers to the
    parent's distribution, a unanimous class becomes a class leaf, and
    an empty attribute list (or no usable split) becomes a probability
    node. Otherwise the examples are split on the best attribute and
    each partition is learned recursively.
    """
    if not examples:
        # Nothing observed at this branch; use the parent's statistics.
        return dt_util.get_probability_node(parent_examples)

    unanimous = dt_util.same_class(examples)
    if unanimous:
        # Pure partition — no further splitting required.
        return unanimous

    if not attributes:
        # Attributes exhausted; emit the plurality/probability leaf.
        return dt_util.get_probability_node(examples)

    chosen, node = get_best_attribute(attributes, examples)
    if chosen is None:
        # No attribute provides a useful split.
        return dt_util.get_probability_node(examples)

    for branch_value, branch_examples in group_by_attribute(chosen, examples):
        # Each child may only split on the attributes not yet used here.
        child_attrs = list(attributes)
        child_attrs.remove(chosen)
        child = decision_tree_learn(branch_examples, child_attrs, examples)
        node.add_subtree(branch_value, child)

    return node
コード例 #3
0
def decision_tree_learn(examples, attributes, parent_examples):
    """Recursively learn a decision tree over continuous attributes.

    Each internal node performs a binary split (<= / > a split point) on
    the best attribute; leaves are plurality nodes or class labels.

    Fix: the original used Python-2-only ``print`` statements. Wrapping
    the single argument in parentheses is behavior-identical under
    Python 2 and makes the code valid Python 3 as well (consistent with
    the other examples in this file). Indentation normalized to the
    standard 4 spaces.
    """
    # If no examples, use plurality of parent.
    if not examples:
        print("++ No examples, using plurality of parent")
        return dt_util.get_plurality_node(parent_examples)

    # If all examples are of the same class, use that class as the leaf.
    common_class = dt_util.same_class(examples)
    if common_class:
        print("++ All of common class")
        return common_class

    # If there are no more attributes left, use plurality.
    if not attributes:
        print("++ No attributes, using plurality")
        return dt_util.get_plurality_node(examples)

    print("++ Computing best attribute")
    A, tree = get_best_attribute(attributes, examples)

    if A is None:
        return dt_util.get_plurality_node(examples)

    print("++ Splitting on attribute: %s" % A)

    # All attributes are continuous, so do a binary split at the chosen
    # point. A continuous attribute stays reusable in deeper subtrees
    # (different split points), so `attributes` is passed down unchanged.
    lt_exs, gt_exs = split_by_attribute(A, tree.get_split_point(), examples)

    # Degenerate split (all examples on one side): stop with a plurality leaf.
    if (len(lt_exs) == 0) or (len(gt_exs) == 0):
        return dt_util.get_plurality_node(examples)

    lt_subtree = decision_tree_learn(lt_exs, attributes, examples)
    gt_subtree = decision_tree_learn(gt_exs, attributes, examples)

    tree.add_less_than_subtree(lt_subtree)
    tree.add_greater_than_subtree(gt_subtree)
    return tree
コード例 #4
0
def decision_tree_learn(examples, attributes, parent_examples):
    """Learn a decision tree with binary splits on continuous attributes.

    Returns a leaf (plurality node or class label) in the base cases,
    otherwise a node split at the best attribute's split point with a
    less-than and a greater-than subtree.

    Fix: the original used Python-2-only ``print`` statements; the
    parenthesized single-argument form below prints identically under
    Python 2 and is also valid Python 3, matching the other examples in
    this file.
    """
    # If no examples, use plurality of parent.
    if not examples:
        print("++ No examples, using plurality of parent")
        return dt_util.get_plurality_node(parent_examples)

    # If all examples are of the same class, use that class as the leaf.
    common_class = dt_util.same_class(examples)
    if common_class:
        print("++ All of common class")
        return common_class

    # If there are no more attributes left, use plurality.
    if not attributes:
        print("++ No attributes, using plurality")
        return dt_util.get_plurality_node(examples)

    print("++ Computing best attribute")
    A, tree = get_best_attribute(attributes, examples)

    if A is None:
        return dt_util.get_plurality_node(examples)

    print("++ Splitting on attribute: %s" % A)

    # All attributes are continuous, so perform a binary split. The full
    # attribute list is passed to both children: a continuous attribute
    # can be split again deeper in the tree at a different point.
    lt_exs, gt_exs = split_by_attribute(A, tree.get_split_point(), examples)

    # Degenerate split (one side empty): terminate with a plurality leaf.
    if (len(lt_exs) == 0) or (len(gt_exs) == 0):
        return dt_util.get_plurality_node(examples)

    lt_subtree = decision_tree_learn(lt_exs, attributes, examples)
    gt_subtree = decision_tree_learn(gt_exs, attributes, examples)

    tree.add_less_than_subtree(lt_subtree)
    tree.add_greater_than_subtree(gt_subtree)
    return tree