Code Example #1
File: hw2_run.py Project: rohinip/GA_homework
import argparse

from hw2 import load_iris_data, cross_validate, knn, nb, lr

# Reconstructed preamble: the snippet as posted begins mid-branch, so the
# argument parsing and data loading below are inferred from the code that
# uses args.classifier, features, and species.
parser = argparse.ArgumentParser()
parser.add_argument("classifier", nargs="?", help="KNN, NB, or LR (default: run all)")
args = parser.parse_args()

features, species, species_names = load_iris_data()

try:
    if args.classifier.upper() == "KNN":
        classifier_list = [("KNN", knn)]
    elif args.classifier.upper() == "NB":
        classifier_list = [("Naive Bayes", nb)]
    elif args.classifier.upper() == "LR":
        classifier_list = [("Logistic Regression", lr)]
    else:
        raise SystemExit("Unknown classifier: %s" % args.classifier)
except AttributeError:
    # No classifier given on the command line: run all of them,
    # using the functions imported above.
    classifier_list = [("KNN", knn), ("Naive Bayes", nb),
                       ("Logistic Regression", lr)]

# Loop through each tuple of the classifier list
for classifier_string, classifier_function in classifier_list:

    print("\n-------- %s --------" % classifier_string)

    best_kfolds = 0
    best_cv = 0

    # Try a fixed set of fold counts, each of which splits the dataset evenly
    for kfolds in [2, 3, 5, 10, 15, 30, 50, 75]:

        cv = cross_validate(features, species, classifier_function, kfolds)

        if cv > best_cv:
            best_cv = cv
            best_kfolds = kfolds

        # Print each fold count's cross-validation accuracy
        print("Fold <<%s>> :: Accuracy <<%s>>" % (kfolds, cv))

    # At the end, print the fold count with the highest accuracy
    print("%s :: Highest Accuracy: <<%s>> :: Fold <<%s>>\n" % (
        classifier_string, best_cv, best_kfolds))
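
Both projects import `cross_validate`, `knn`, `nb`, and `lr` from a shared `hw2` module whose source is not reproduced on this page. The following is a minimal sketch of what such a `cross_validate` helper could look like, assuming scikit-learn is available and that each classifier name is a zero-argument factory returning an unfitted estimator; it is an illustration, not the project's actual implementation.

import numpy as np
from sklearn.model_selection import KFold

def cross_validate(features, labels, classifier_function, k_fold):
    # Hypothetical sketch: mean accuracy over k_fold train/test splits.
    # Assumes classifier_function() returns an unfitted sklearn estimator.
    accuracies = []
    splitter = KFold(n_splits=k_fold, shuffle=True, random_state=0)
    for train_idx, test_idx in splitter.split(features):
        model = classifier_function()
        model.fit(features[train_idx], labels[train_idx])
        accuracies.append(model.score(features[test_idx], labels[test_idx]))
    return np.mean(accuracies)

Shuffling with a fixed random_state keeps the splits reproducible while the fold counts are compared; note that every fold count used above (2 through 75) divides the 150-sample iris dataset evenly.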
Code Example #2
File: hw2_test.py Project: pchadri/GA_homework
from hw2 import load_iris_data, cross_validate, knn, nb, lr, logistic

XX, yy, y = load_iris_data()

classifiers_to_cv = [("kNN", knn), ("Naive Bayes", nb),
                     ("Linear Regression", lr), ("Logistic Regression", logistic)]

for c_label, classifier in classifiers_to_cv:

    print()
    print("---> %s <---" % c_label)

    best_k = 0
    best_cv_a = 0
    for k_f in [2, 3, 5, 10, 15, 30, 50, 75]:
        cv_a = cross_validate(XX, yy, classifier, k_fold=k_f)
        if cv_a > best_cv_a:
            best_cv_a = cv_a
            best_k = k_f

        print("fold <<%s>> :: acc <<%s>>" % (k_f, cv_a))

    print("\n%s Highest Accuracy: fold <<%s>> :: <<%s>>\n" % (c_label, best_k, best_cv_a))
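
The `load_iris_data` call above unpacks three values, and each classifier name is passed around as a callable. A plausible sketch of those `hw2` helpers, assuming scikit-learn's bundled iris dataset (the names and return shapes are inferred from the calls in both examples, not taken from the actual projects):

from sklearn.datasets import load_iris
from sklearn.linear_model import LinearRegression, LogisticRegression
from sklearn.naive_bayes import GaussianNB
from sklearn.neighbors import KNeighborsClassifier

def load_iris_data():
    # Assumed to return (feature matrix, integer labels, label names),
    # matching the three-value unpacking XX, yy, y above.
    iris = load_iris()
    return iris.data, iris.target, iris.target_names

# Assumed zero-argument factories matching the (label, classifier) tuples.
def knn():
    return KNeighborsClassifier()

def nb():
    return GaussianNB()

def lr():
    # Labelled "Linear Regression" in hw2_test.py; its score() would be
    # R^2 rather than classification accuracy.
    return LinearRegression()

def logistic():
    return LogisticRegression()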
