Example #1
# --- Example 1: 80/20 train/test split over a shuffled index permutation ---
# BUG FIX: the original initialized trainingLabels/testData/testLabels but
# never trainingData, so the first append below raised NameError.
trainingData = []
trainingLabels = []
testData = []
testLabels = []
# `index` is presumably a pre-shuffled permutation of range(len(windowedData))
# (example 2 calls random.shuffle(index)) — TODO confirm against the caller.
n = len(windowedData)  # hoist loop-invariant length; the int() wrapper was redundant
for i in range(n):
    if i / n < 0.8:  # first 80% of the shuffled order -> training set
        trainingData.append(windowedData[index[i]])
        trainingLabels.append(windowLabels[index[i]])
    else:  # remaining 20% -> held-out test set
        testData.append(windowedData[index[i]])
        testLabels.append(windowLabels[index[i]])

# Fit the feature-normalization parameters on the training windows and
# replace trainingData with its normalized feature representation.
trainingData = a.initFeatNormalization(inputData=trainingData)

# Transform each test window in place with the already-fitted normalizer
# (initDone=True — presumably reuses the training-set statistics; verify).
for pos, window in enumerate(testData):
    extracted = a.extractFeatures(data=window)
    testData[pos] = a.featureNormalization(features=extracted, initDone=True)

# Model-selection utilities (imported one per line in the original; merged here).
from sklearn.model_selection import (StratifiedKFold, cross_val_score,
                                     train_test_split)
#model = xgboost.XGBClassifier(max_depth=3, learning_rate=0.3)
from sklearn.neural_network import MLPClassifier

# The RBF-kernel support-vector classifier is the active model choice;
# the alternatives above/below were left commented out by the author.
model = svm.SVC(kernel='rbf')
#from sklearn.neighbors import KNeighborsClassifier
#model = KNeighborsClassifier(n_neighbors=4, metric='euclidean')

# 3-fold stratified cross-validation on the normalized training features.
kfold = StratifiedKFold(n_splits=3)
results = cross_val_score(estimator=model,
                          X=np.array(trainingData),
                          y=np.array(trainingLabels),
                          cv=kfold)
Example #2
# --- Example 2: shuffle the index permutation, then 80/20 train/test split ---
# `index` is presumably list(range(len(windowedData))) built earlier — TODO confirm.
random.shuffle(index)

trainingData = []
trainingLabels = []
testData = []
testLabels = []
# Hoist the loop-invariant length (the original recomputed len(windowedData)
# every iteration) and drop the redundant int() around len().
n = len(windowedData)
for i in range(n):
    if i / n < 0.8:  # first 80% of the shuffled order -> training set
        trainingData.append(windowedData[index[i]])
        trainingLabels.append(windowLabels[index[i]])
    else:  # remaining 20% -> held-out test set
        testData.append(windowedData[index[i]])
        testLabels.append(windowLabels[index[i]])

# Fit normalization on the training windows and keep the transformed result.
trainingData = a.initFeatNormalization(inputData=trainingData)

# Apply the fitted normalization to every test window in place
# (initDone=True — presumably reuses the training-set statistics; verify).
for pos in range(len(testData)):
    feats = a.extractFeatures(data=testData[pos])
    testData[pos] = a.featureNormalization(features=feats, initDone=True)

from sklearn.model_selection import StratifiedKFold
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import train_test_split
#model = xgboost.XGBClassifier(max_depth=3, learning_rate=0.3)
from sklearn.neural_network import MLPClassifier

# BUG FIX: XGBClassifier was instantiated below without ever being imported
# (only the commented-out xgboost line above referenced it), which raised
# NameError at runtime.
from xgboost import XGBClassifier

model = XGBClassifier()
#from sklearn.neighbors import KNeighborsClassifier
#model = KNeighborsClassifier(n_neighbors=4, metric='euclidean')

# 3-fold stratified cross-validation; report mean/std accuracy as percentages.
kfold = StratifiedKFold(n_splits=3)
results = cross_val_score(model, np.array(trainingData), np.array(trainingLabels), cv=kfold)
print("Accuracy: %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100))