# NOTE(review): fragment — the enclosing loops and the definitions of clf, X,
# training_answer, wave_coef_matrix_testig, selected_feature_index, random,
# tree, MLPClassifier and the all_per_correct* lists are outside this view.
y = training_answer
            clf.fit(X, y)

            # train DT
            # Decision tree with library defaults; no depth/leaf limits set.
            clf_DT = tree.DecisionTreeClassifier()
            clf_DT = clf_DT.fit(X, y)

            # use clf to predict
            # use the same feature columns as the training data
            out_put = clf.predict(
                wave_coef_matrix_testig[:, selected_feature_index])
            out_put_DT = clf_DT.predict(
                wave_coef_matrix_testig[:, selected_feature_index])

            # see how good it is (fun.predict_accuracy is a project helper;
            # presumably returns fraction of correct labels — not visible here)
            per_correct = fun.predict_accuracy(testing_answer, out_put)
            per_correct_DT = fun.predict_accuracy(testing_answer, out_put_DT)
            # add the results to all_per_correct
            all_per_correct.append(per_correct)
            all_per_correct_DT.append(per_correct_DT)

            ## NN classifier
            # Random-restart seed search: try 10 random seeds and keep the one
            # with the lowest training loss.  Only the constructor is visible
            # in this chunk; the fit/loss comparison presumably follows
            # outside this view — TODO confirm against the full file.
            bestseed = 0
            bestloss = 10000000
            for i in range(0, 10, 1):
                r = random.randint(0, 10000000)
                clf_nn = MLPClassifier(solver='lbfgs',
                                       activation='logistic',
                                       hidden_layer_sizes=(20, 10, 5),
                                       random_state=r,
                                       max_iter=1000)
            
        # for each column the majority of 1 or 0 wins the result  
        perdiction = []
        all_ones_nn = np.asarray(all_ones_nn)
        
        for jj in range(0, all_ones_nn.shape[1]):
            voting_results = np.count_nonzero(all_ones_nn[:,jj])
            if voting_results > m/2:
                perdiction.append(1)
            else:
                perdiction.append(0)
                    
        # calculate pertange correct
        perdiction = np.asarray(perdiction)
        
        per_correct_nn = fun.predict_accuracy(answer_sheet, perdiction)
        per_correct_ITD = np.sum(perdiction[:row_num/2].astype(float))/(row_num/2)
        per_correct_noITD = np.sum(perdiction[row_num/2:].astype(float))/(row_num/2)
        
        four_sit_result_nn.append(per_correct_nn)
        four_sit_result_nn_ITD.append(per_correct_ITD)
        four_sit_result_nn_noITD.append(per_correct_noITD)
        
    all_accuracy_both.append(four_sit_result_nn)
    all_accuracy_ITD.append(four_sit_result_nn_ITD)
    all_accuracy_noITD.append(four_sit_result_nn_noITD)
    
all_accuracy_both = np.asarray(all_accuracy_both)
all_accuracy_ITD = np.asarray(all_accuracy_ITD)
all_accuracy_noITD = np.asarray(all_accuracy_noITD)
                 
Example #3
0
 # NOTE(review): fragment — the `if` header guarding these two assignments
 # (presumably `if clf_nn.loss_ < bestloss:`) is missing from this chunk,
 # as are the enclosing loop and most name definitions — TODO confirm.
         bestloss = clf_nn.loss_
         bestseed = r
     
 print(clf_nn.loss_, bestloss)
 
 # save loss
 all_loss.append(bestloss)
 
 # refit a fresh network with the winning seed on the full training data
 clf_nn = MLPClassifier(solver='lbfgs', activation='logistic', hidden_layer_sizes=(20,10,5),random_state=bestseed,max_iter=1000)
 clf_nn.fit(X,y)
 
 # before making predictions
 # add the subject column to testing matrix as well
 wave_coef_matrix_testig_subject = np.concatenate((wave_coef_matrix_testig[:,selected_feature_index], region_matrix_testig), axis=1)
 prediction = clf_nn.predict(wave_coef_matrix_testig_subject)
 per_correct_nn = fun.predict_accuracy(testing_answer, prediction)
 all_per_correct_nn.append(per_correct_nn)
 
 ## each time create a folder and save trained classifier in it.
 # NOTE(review): Windows-only path building with literal backslashes;
 # on POSIX these become part of the filename — verify target platform.
 path  = jj_path_each + '\\' + site_name + '\\'+ str(n);
 # pre-3.2 idiom for makedirs(..., exist_ok=True): ignore the race where
 # the directory already exists, re-raise any other OSError
 try: 
     os.makedirs(path)
 except OSError:
     if not os.path.isdir(path):
         raise
         
 #joblib.dump(clf, path+'\\clf_svm.pkl') 
 #joblib.dump(clf_DT, path+'\\clf_DT.pkl') 
 joblib.dump(clf_nn, path+'\\clf_nn.pkl') 
 
 # also save selected feature index
        #blvg_trace_r_a = pca_a.fit(wave_coef_matrix).transform(wave_coef_matrix)

        ## start training SVM using 15 features extracted
        # NOTE(review): fragment — the enclosing loops and the definitions of
        # wave_coef_matrix_training, selected_feature_index, fun, svm, and the
        # accumulator lists are outside this view.
        wave_coef_matrix_training_feature_selected = wave_coef_matrix_training[:,
                                                                               selected_feature_index]

        # train svm (library-default SVC: RBF kernel, C=1.0)
        clf = svm.SVC()
        # X is the training data set
        # y is the class label
        X = wave_coef_matrix_training_feature_selected
        y = training_answer
        clf.fit(X, y)

        # use clf to predict
        # use the same feature columns as the training data
        out_put = clf.predict(wave_coef_matrix_testig[:,
                                                      selected_feature_index])
        # see how good it is
        per_correct = fun.predict_accuracy(testing_answer, out_put)
        # add the results to all_per_correct
        all_per_correct.append(per_correct)

    all_per_correct = np.asarray(all_per_correct)
    four_sit_result.append(all_per_correct)
###############################################################################

# report mean accuracy per site; indices 0-3 presumably correspond to
# sites A-D in collection order — TODO confirm against the site loop
print('C: ' + str(np.mean(four_sit_result[2])))
print('A: ' + str(np.mean(four_sit_result[0])))
print('D: ' + str(np.mean(four_sit_result[3])))
print('B: ' + str(np.mean(four_sit_result[1])))
        # for each column the majority of 1 or 0 wins the result
        perdiction = []
        all_ones_nn = np.asarray(all_ones_nn)

        for jj in range(0, all_ones_nn.shape[1]):
            voting_results = np.count_nonzero(all_ones_nn[:, jj])
            if voting_results > m / 2:
                perdiction.append(1)
            else:
                perdiction.append(0)

        # calculate pertange correct
        perdiction = np.asarray(perdiction)

        per_correct_nn = fun.predict_accuracy(answer_sheet, perdiction)
        per_correct_SVM = fun.predict_accuracy(answer_sheet, out_put)
        per_correct_DT = fun.predict_accuracy(answer_sheet, out_put_DT)

        per_correct_ITD = np.sum(
            perdiction[:row_num / 2].astype(float)) / (row_num / 2)
        per_correct_noITD = np.sum(
            perdiction[row_num / 2:].astype(float)) / (row_num / 2)

        four_sit_result_nn.append(per_correct_nn)
        four_sit_result_nn_ITD.append(per_correct_ITD)
        four_sit_result_nn_noITD.append(per_correct_noITD)

    all_accuracy_both.append(four_sit_result_nn)
    all_accuracy_ITD.append(four_sit_result_nn_ITD)
    all_accuracy_noITD.append(four_sit_result_nn_noITD)