gamma = [0.01, 0.001, 0.0001]
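# C, loss, penalty and kernel are defined earlier and are not shown in this
# snippet; the values below are purely illustrative placeholders so that the
# grid-search loops are self-contained.
C = [1, 10, 100]
loss = ['hinge', 'squared_hinge']
penalty = ['l2']
kernel = ['rbf', 'poly']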

#Optimize the linear classifier first

f = open('./GridSearch_output', 'w')
f.write('PCA components: %d\n' % (n_components))
write2 = 'X_train: %d, X_test: %d\n' % (X_train_PCA.shape[0], X_test_PCA.shape[0])
f.write(write2)
f.write("Scores: mAP, accuracy_score, avg_Precision, avg_Recall\n\n")

for c in C:
    #Optimize the linear kernel first
    for lo in loss:
        for pen in penalty:
            classifier = OneVsRestClassifier(LinearSVC(random_state=0, C=c, loss=lo, penalty=pen)).fit(X_train_PCA, Y_train)
            Scores = classify_library.metric_scores(classifier, X_test_PCA, Y_test)
            setting = "Settings: Linear SVM, C: %d, loss: %s, penalty: %s\n" % (c,lo,pen)
            score = "Scores: (%f, %f, %f, %f)\n" % (Scores[0], Scores[1], Scores[2], Scores[3])
            f.write(setting)
            f.write(score)
            f.write('\n')
    #Optimize the non-linear kernels
    for ker in kernel:
        for gam in gamma:
            classifier = OneVsRestClassifier(svm.SVC(random_state=0, C=c, kernel=ker, gamma=gam)).fit(X_train_PCA, Y_train)
            Scores = classify_library.metric_scores(classifier, X_test_PCA, Y_test)
            setting = "Settings: SVM kernel: %s, C: %d, gamma: %f\n" % (ker,c,gam)
            score = "Scores: (%f, %f, %f, %f)\n" % (Scores[0], Scores[1], Scores[2], Scores[3])
            f.write(setting)
            f.write(score)
            f.write('\n')
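
# classify_library.metric_scores is not shown in these snippets. Based on the
# header written to the output file above ("mAP, accuracy_score,
# avg_Precision, avg_Recall"), the sketch below shows what it plausibly
# computes; the exact implementation is an assumption, not the original code.
from sklearn.metrics import (accuracy_score, average_precision_score,
                             precision_score, recall_score)
from sklearn.preprocessing import label_binarize

def metric_scores_sketch(classifier, X_test, Y_test, verbose=False):
    Y_pred = classifier.predict(X_test)
    Y_bin = label_binarize(Y_test, classes=classifier.classes_)
    decision = classifier.decision_function(X_test)
    mAP = average_precision_score(Y_bin, decision, average='macro')
    acc = accuracy_score(Y_test, Y_pred)
    avg_prec = precision_score(Y_test, Y_pred, average='macro')
    avg_rec = recall_score(Y_test, Y_pred, average='macro')
    if verbose:
        print(mAP, acc, avg_prec, avg_rec)
    return mAP, acc, avg_prec, avg_rec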
Example n. 2
                    best_ker = ker
                    best_gam = gam
                print 'Non-linear: C = ' + str(
                    c) + ', kernel = ' + ker + ', Gamma = ' + str(gam)
                print 'Precisions of CV = ' + str(scores)
                print 'Mean Precision = ' + str(scores.mean())
                print 'Confusion Matrix = '
                print conf_mat
                print '\n-----------------------------------------------------\n'
    print '\n########################################################\n'
    print 'BEST LINEAR SVM (CV precision = ' + str(best_linear * 100) + ' %)\n'
    #Best Linear SVM
    classifier_l = OneVsRestClassifier(
        LinearSVC(random_state=0, C=best_c_l, loss=best_loss,
                  penalty=best_pen)).fit(X_train_final, Y_train)
    Scores = classify_library.metric_scores(classifier_l, X_test_final, Y_test)
    print "Settings: Linear SVM, C: %d, loss: %s, penalty: %s" % (
        best_c_l, best_loss, best_pen)
    print "Scores in test: (%f, %f, %f, %f)\n" % (Scores[0], Scores[1],
                                                  Scores[2], Scores[3])
    print "Confusion Matrix = "
    print conf_mat_l

    print '\n########################################################\n'
    print 'BEST NON-LINEAR SVM (CV precision = ' + str(
        best_non_linear * 100) + ' %)\n'
    #Best non-linear SVM
    classifier_nl = OneVsRestClassifier(
        svm.SVC(random_state=0, C=best_c_nl, kernel=best_ker,
                gamma=best_gam)).fit(X_train_final, Y_train)
    Scores = classify_library.metric_scores(classifier_nl, X_test_final,
                                            Y_test)
Example n. 3
X_train, Y_train = make_frame_matrix(training, training_output, class_index)
X_test, Y_test = make_frame_matrix(testing, testing_output, class_index)

### Reduce the feature dimension to 1000 with PCA

# In[18]:

# Fit PCA on the designated PCA-training frames, then project both splits
# into the 1000-dimensional space.
X_PCA, _ = make_frame_matrix(training_PCA, training_output, class_index)
pca = PCA(n_components=1000)
pca.fit(X_PCA)
X_train_PCA = pca.transform(X_train)
X_test_PCA = pca.transform(X_test)

# In[22]:

classifier = OneVsRestClassifier(
    LinearSVC(random_state=0, C=1, loss='l2',
              penalty='l2')).fit(X_train_PCA, Y_train)
classify_library.metric_scores(classifier, X_test_PCA, Y_test, verbose=True)

# In[27]:

baseline_file = "./baseline"
np.savez(baseline_file,
         X_train_PCA=X_train_PCA,
         X_test_PCA=X_test_PCA,
         Y_train=Y_train,
         Y_test=Y_test)
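
# np.savez appends the .npz extension, so the cached split can be reloaded
# later without recomputing the PCA projection (usage example):
data = np.load(baseline_file + '.npz')
X_train_PCA, X_test_PCA = data['X_train_PCA'], data['X_test_PCA']
Y_train, Y_test = data['Y_train'], data['Y_test']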

# In[ ]:
Example n. 6
    flname_test = '/home/zhenyang/Workspace/data/UCF101/features/UCF101_test1.fv'
    if os.path.exists(flname_test + '.npz'):
        data = np.load(flname_test + '.npz')
        X_test = data['X_test']
        Y_test = data['Y_test']
    else:
        X_test, Y_test = make_FV_matrix(videos_test, fv_dir, labels_test)
        np.savez(flname_test, X_test=X_test, Y_test=Y_test)
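
    # make_FV_matrix is not shown in these snippets; a minimal sketch of what
    # it presumably does (an assumption, including the '.fv.npy' file naming):
    # stack one Fisher vector per video from fv_dir and collect the labels.
    def make_FV_matrix_sketch(videos, fv_dir, labels):
        X = np.vstack([np.load(os.path.join(fv_dir, video + '.fv.npy'))
                       for video in videos])
        Y = np.asarray(labels)
        return X, Y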

    # TRAINING
    model_file = '/home/zhenyang/Workspace/data/UCF101/models/UCF101_linearsvm_traintest1.model'
    if os.path.exists(model_file + '.pkl'):
        with open(model_file + '.pkl', 'rb') as fp:
            classifier = pickle.load(fp)

    else:
        estimator = OneVsRestClassifier(
            LinearSVC(random_state=0, C=100, loss='l1', penalty='l2'))
        classifier = estimator.fit(X_train, Y_train)
        # store the model in a pickle file
        with open(model_file + '.pkl', 'wb') as fp:
            pickle.dump(classifier, fp)

    # TESTING
    result_file = '/home/zhenyang/Workspace/data/UCF101/results/UCF101_linearsvm_traintest1.result'
    results = classify_library.metric_scores(classifier,
                                             X_test,
                                             Y_test,
                                             verbose=True)
    print results
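
    # result_file is defined above but the snippet ends before the scores are
    # written out; one plausible way to persist them (an assumption, not part
    # of the original code):
    with open(result_file + '.txt', 'w') as fp:
        fp.write('mAP, accuracy, avg_precision, avg_recall: %s\n' % (results,))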