Example #1
           [5.6, 3.0, 4.1, 1.3, 2], [4.7, 3.2, 1.6, 0.2, 1],
           [6.6, 3.0, 4.4, 1.4, 2]]

from materials.decision_tree_learning import build_tree, classify, max_classification

if __name__ == '__main__':
    x = input().split(', ')
    test_case = list(map(float, x[:-1])) + [int(x[-1])]

    # index marking 30% of the dataset; the data is split into three parts (30% / 30% / 40%)
    thirty = int(len(dataset) * 0.3)

    first_data = dataset[:thirty]
    second_data = dataset[thirty:thirty * 2]
    third_data = dataset[thirty * 2:]

    first_tree = build_tree(first_data)
    second_tree = build_tree(second_data)
    third_tree = build_tree(third_data)

    # what we want: a dict of votes per class, updated after each tree's prediction, e.g.
    # after tree 1 (votes for class 0): 0 -> 1; 1 -> 0; 2 -> 0
    # after tree 2 (votes for class 1): 0 -> 1; 1 -> 1; 2 -> 0
    # after tree 3 (votes for class 1): 0 -> 1; 1 -> 2; 2 -> 0
    # final prediction: class 1 (with most votes)

    # setting default values
    # votes[0] = 0; votes[1] = 0; votes[2] = 0
    votes = {0: 0, 1: 0, 2: 0}

    # casting votes (the predictions)
    first_pred = max_classification(classify(test_case, first_tree))
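
    # A possible continuation (not part of the original snippet): cast the remaining
    # votes in the same way and print the class with the most votes. This is only a
    # sketch, assuming classify() and max_classification() behave as imported above
    # and return the integer class labels used as keys in 'votes'.
    second_pred = max_classification(classify(test_case, second_tree))
    third_pred = max_classification(classify(test_case, third_tree))

    for pred in (first_pred, second_pred, third_pred):
        votes[pred] = votes[pred] + 1

    # the class with the most votes is the final prediction
    print(max(votes, key=votes.get))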
Example #2
    att3 = float(input())
    att4 = float(input())
    planttype = input()
    testCase = [att1, att2, att3, att4, planttype]

    # we use '//' to get an integer result (slice indices must be integers)
    half = len(trainingData) // 2

    # slice syntax: a_list[startRange:endRange]
    tr_data_1 = trainingData[:half]  # [:endRange] -> starts from the beginning of 'trainingData'
    tr_data_2 = trainingData[half:]  # [startRange:] -> goes to the end of 'trainingData'

    tree1 = build_tree(tr_data_1)
    tree2 = build_tree(tr_data_2)

    # pred_1 and pred_2 are dicts, so we need to send them to the max_classification() function to get the predicted class
    pred_1 = classify(testCase, tree1)
    pred_2 = classify(testCase, tree2)

    pred_1_class = max_classification(pred_1)
    pred_2_class = max_classification(pred_2)

    if pred_1_class == pred_2_class:
        # both trees predict the same class, e.g. High == High
        print(pred_1_class)
    else:
        # the trees disagree, e.g. High != Low
        print("KONTRADIKCIJA")
Example #3
        [600.0, 29.4, 32.0, 37.2, 41.5, 15.0, 'Bream'],
        [145.0, 22.0, 24.0, 25.5, 25.0, 15.0, 'Perch'],
        [1100.0, 40.1, 43.0, 45.5, 27.5, 16.3, 'Perch']]

if __name__ == "__main__":
    test_case = input().split(', ')
    # all columns except the last one are numeric features; the last one is the class label
    test_case = [float(x) for x in test_case[:-1]] + [test_case[-1]]

    roach_set = []
    for row in data:
        # list[-1] -> the last element of the list
        if row[-1] == 'Roach':
            roach_set.append(row)
    roach_set = roach_set[:40]

    # list comprehension -> the process of creating a list from another list
    # the syntax: new_list = [what-we-want-to-keep-in-the-new-list for item in otherList if some-condition]
    # some-condition is optional
    pike_set = [row for row in data if row[-1] == 'Pike'][:40]
    # roach_set = [row for row in data if row[-1] == 'Roach'][:40]

    # the '+' operator on lists combines the elements of the two lists into one list
    train_data = roach_set + pike_set

    tree = build_tree(train_data)

    class_dict = classify(test_case, tree)
    class_name = max_classification(class_dict)
    print(class_name)
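
    # A tiny sketch (illustrative only; the toy 'rows' below are made up) of the
    # loop / list-comprehension equivalence used above for filtering rows by class:
    rows = [[1.0, 'Pike'], [2.0, 'Roach'], [3.0, 'Pike']]
    with_loop = []
    for r in rows:
        if r[-1] == 'Pike':
            with_loop.append(r)
    with_comprehension = [r for r in rows if r[-1] == 'Pike']
    # both equal [[1.0, 'Pike'], [3.0, 'Pike']]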
Example #4
        new_test.append(new_row)

    # second way (one loop, one list comprehension)
    """
    new_test = []
    for row in test:
        new_row = [row[col_index] for col_index in range(len(row)) if col_index != column_ind]
        new_test.append(new_row)
    """

    # third way (two nested list comprehensions)
    """
    new_test = [[row[col_index] for col_index in range(len(row)) if col_index != column_ind] for row in test]
    """

    tree1 = build_tree(train)
    tree2 = build_tree(new_train)

    num_correct_1 = 0
    num_correct_2 = 0

    # one way (with two separate loops)
    for row in test:
        tree_1_pred = max_classification(classify(row, tree1))

        correct_class = row[-1]

        if tree_1_pred == correct_class:
            num_correct_1 = num_correct_1 + 1

    for row in new_test:
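        # A possible continuation (not part of the original snippet), assuming the removed
        # column is a feature column, so row[-1] is still the correct class label:
        tree_2_pred = max_classification(classify(row, tree2))

        correct_class = row[-1]

        if tree_2_pred == correct_class:
            num_correct_2 = num_correct_2 + 1

    # the final output depends on the original task; one option is to report both counts
    # print(num_correct_1, num_correct_2)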