def test_forest_list(forest_list, cv, database, debug=False):
    """Evaluate one trained forest per cross-validation fold and return the mean F1-measure.

    Args:
        forest_list: sequence of trained forests, one per CV fold (parallel to cv.folds).
        cv: cross-validation object exposing a `folds` sequence of test sets.
        database: dataset object exposing `get_target_attribute_values()` for the class labels.
        debug: when True, print per-fold confusion matrices and the final mean.

    Returns:
        float: mean F1-measure over all folds.
    """
    # Bug fix: original read undefined name `db`; the parameter is `database`.
    classes = database.get_target_attribute_values()
    f1measure = 0
    for i, forest in enumerate(forest_list):
        test_set = cv.folds[i]
        (real_labels, pred_labels) = forest.test(test_set, debug)
        confusion_matrix = ConfusionMatrix.create_confusion_matrix(
            real_labels, pred_labels, classes)
        f1measure += Metrics.f1measure(confusion_matrix)
        if debug:
            print('Testing the forest with the dataset:')
            print(test_set)
            print('Test Results:')
            print('%d: ' % (i + 1))
            print(confusion_matrix)
    # Average the accumulated per-fold scores.
    f1measure /= len(forest_list)
    if debug:
        print('Mean F1-Measure: ', f1measure)
    return f1measure
# Sanity-check script: compare the project's confusion matrix / F1-measure
# against scikit-learn's implementations on a fixed 3-class example.
real_classes = [
    'c1', 'c1', 'c1', 'c1', 'c1', 'c1', 'c1', 'c1', 'c1', 'c1',
    'c1', 'c1', 'c1', 'c1', 'c1', 'c2', 'c2', 'c2', 'c2', 'c2',
    'c3', 'c3', 'c3', 'c3', 'c3', 'c3', 'c3', 'c3', 'c3', 'c3',
]
pred_classes_array = [
    'c1', 'c1', 'c1', 'c1', 'c1', 'c1', 'c1', 'c1', 'c1', 'c1',
    'c1', 'c2', 'c3', 'c3', 'c3', 'c2', 'c2', 'c2', 'c2', 'c1',
    'c3', 'c3', 'c3', 'c3', 'c3', 'c3', 'c1', 'c1', 'c2', 'c2',
]
classes = ['c1', 'c2', 'c3']

confusion_matrix = ConfusionMatrix.create_confusion_matrix(
    real_classes, pred_classes_array, classes)
# Bug fix: `labels` must be passed by keyword (positional form is rejected by
# modern scikit-learn); also fixes the `sklean` typo in the variable name.
sklearn_confusion_matrix = metrics.confusion_matrix(
    real_classes, pred_classes_array, labels=classes)
print(confusion_matrix)
print(sklearn_confusion_matrix)

# Bug fix: original did `f1measure = f1measure(...)`, rebinding the function
# name with its float result; use a distinct name for the score.
f1_score_value = f1measure(confusion_matrix)
sklearn_f1measure = metrics.precision_recall_fscore_support(
    real_classes, pred_classes_array, average='macro')
print('f1measure\t', f1_score_value)
print('sk fb_score\t', sklearn_f1measure[2])