Example #1
    # Excerpt from a larger routine: the loop index i and the variables
    # length, gen and numV are defined in the enclosing code (not shown).
    # Assumes Orange 2.x style imports, roughly:
    #   import numpy as np
    #   from Orange import data
    #   from Orange.classification import knn, svm
    svmCnt = []
    knnCnt = []
    traFile = "C:\\Users\\Parker\\Documents\\NPS Projects\\thesis\\status\\Databases\\train\\%strain.csv" % str(i + 1)
    testFile = "C:\\Users\\Parker\\Documents\\NPS Projects\\thesis\\status\\Databases\\train\\%stest.csv" % str(i + 1)
    train = data.Table(traFile)  # training fold for this iteration
    test = data.Table(testFile)  # matching test fold

    # RBF-kernel C-SVM with probability estimates enabled
    svmLearner = svm.SVMLearner(svm_type=svm.SVMLearner.C_SVC,
                                kernel_type=svm.SVMLearner.RBF, kernel_func=None,
                                C=1, nu=0.5, p=0.1, gamma=0.0, degree=3, coef0=0,
                                shrinking=True, probability=True, verbose=False,
                                cache_size=200, eps=0.001, normalization=False)

    # grid-search gamma and C with 8-fold internal cross-validation
    svmLearner.tune_parameters(train, parameters=["gamma", "C"], folds=8)

    svmClassifier = svmLearner(train)           # train the tuned SVM
    knnClassifier = knn.kNNLearner(train, k=8)  # train an 8-nearest-neighbour classifier

    # classify every test instance with both models
    for t in test:
        svmCnt.append(svmClassifier(t))
        knnCnt.append(knnClassifier(t))

    voteIdx = 0
    imp = (length - gen)
    svmVotes = np.zeros(length / numV)  # create arrays for each voting block
    knnVotes = np.zeros(length / numV)  # (Python 2 integer division)

    j = 0
    while j < length:  # for the length of the vector
        svm_neigh = 0
        knn_neigh = 0
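The excerpt breaks off inside the voting loop, so the block-voting logic itself is not shown. As a hedged illustration only (not the original scheme), the small helper below compares the two prediction lists against each other and against the true test labels, assuming Orange 2.x instances expose get_class():

def agreement_and_accuracy(svmCnt, knnCnt, test):
    # Hypothetical helper, not from the source: fraction of instances on which
    # the two classifiers agree, plus each classifier's accuracy on the fold.
    agree = svm_correct = knn_correct = 0
    for svm_label, knn_label, inst in zip(svmCnt, knnCnt, test):
        truth = inst.get_class()
        agree += (svm_label == knn_label)
        svm_correct += (svm_label == truth)
        knn_correct += (knn_label == truth)
    n = float(len(test))
    return agree / n, svm_correct / n, knn_correct / n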
Example #3
def setUp(self):
    # fresh kNN learner with a Euclidean distance constructor before each test
    self.learner = knn.kNNLearner(distance_constructor=Euclidean())
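For context, a minimal sketch of how such a setUp might sit inside a complete unittest test case; the module paths (Orange.classification.knn, Orange.distance), the bundled "iris" table and the test method are assumptions, not part of the source:

import unittest
from Orange import data, distance
from Orange.classification import knn

class TestKNNLearner(unittest.TestCase):
    def setUp(self):
        # fresh learner with an explicit Euclidean distance constructor
        self.learner = knn.kNNLearner(distance_constructor=distance.Euclidean())

    def test_builds_classifier(self):
        table = data.Table("iris")        # bundled demo dataset (assumed available)
        classifier = self.learner(table)  # training returns a classifier
        # the trained classifier should produce a prediction for every instance
        self.assertEqual(len([classifier(inst) for inst in table]), len(table))

if __name__ == "__main__":
    unittest.main()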
Example #4
def knn():
    # factory returning a default kNN learner (Orange 2.x kNNLearner)
    return kNNLearner()
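A short usage sketch for the factory, assuming Orange 2.x evaluation helpers (Orange.evaluation.testing.cross_validation, Orange.evaluation.scoring.CA) and an illustrative dataset name:

from Orange import data
from Orange.classification.knn import kNNLearner
from Orange.evaluation import testing, scoring

def knn():
    # same factory as above: a default kNN learner
    return kNNLearner()

table = data.Table("iris")                                   # illustrative dataset
results = testing.cross_validation([knn()], table, folds=5)  # 5-fold CV with the factory's learner
print("CA: %.3f" % scoring.CA(results)[0])                   # classification accuracy of the single learner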