Example #1
# Assumption: ClassifierChain is a project-local multi-label classifier that
# exposes zero_one_loss_score, hamming_loss_score and accuracy_score; it is
# not sklearn.multioutput.ClassifierChain, whose interface differs.
from sklearn.model_selection import KFold
from sklearn.utils import shuffle


def do_cross_validation(X, Y):
    X, Y = shuffle(X, Y)
    folds = 10
    kf = KFold(n_splits=folds, shuffle=True)
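    # Note: the data are shuffled once up front and KFold shuffles again;
    # either step alone would be sufficient.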
    cc = ClassifierChain()
    zero_one_score = 0
    hamming_score = 0
    accuracy = 0

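    # Train and evaluate on each of the K folds, accumulating the three metrics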
    for train, test in kf.split(X):
        X_train, y_train = X[train], Y[train]
        X_test, y_test = X[test], Y[test]

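        # Fit the chain on the training fold, then score it on the held-out
        # fold with the chain's own scoring helpers (assumed API, see note above)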
        cc.fit(X_train, y_train)
        zero_one_score += cc.zero_one_loss_score(X_test, y_test)
        hamming_score += cc.hamming_loss_score(X_test, y_test)
        accuracy += cc.accuracy_score(X_test, y_test)

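    # Average each accumulated metric over all folds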
    zero_one_score /= folds
    hamming_score /= folds
    accuracy /= folds

    #print "0/1 Loss:", zero_one_score
    #print "Hamming Loss:", hamming_score
    #print "Accuracy:", accuracy
    #print

    return [zero_one_score, hamming_score, accuracy]
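

# Minimal usage sketch (assumptions: a ClassifierChain implementation as noted
# above is importable, and synthetic multi-label data from scikit-learn's
# make_multilabel_classification stands in for the real dataset).
if __name__ == "__main__":
    from sklearn.datasets import make_multilabel_classification

    # Generate a small multi-label problem: 500 samples, 20 features, 5 labels
    X_demo, Y_demo = make_multilabel_classification(
        n_samples=500, n_features=20, n_classes=5, random_state=0
    )
    zero_one, hamming, acc = do_cross_validation(X_demo, Y_demo)
    print("0/1 Loss:", zero_one)
    print("Hamming Loss:", hamming)
    print("Accuracy:", acc)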