Example #1
# Imports needed by the code below (the commented ones are only required
# when the corresponding alternative model is enabled)
from sklearn.model_selection import train_test_split, cross_val_predict
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import confusion_matrix, accuracy_score
# from sklearn.tree import DecisionTreeClassifier
# from sklearn.ensemble import RandomForestClassifier
# from sklearn import svm
import scikitplot as skplt
from matplotlib import pyplot

# Train/test split; balance the data with SMOTE if needed (see the sketch below)
X_train, X_test, y_train, y_test = train_test_split(X1, y1, test_size=0.4, random_state=1)
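# A minimal sketch of the SMOTE balancing mentioned above, assuming the
# imbalanced-learn package (imblearn) is available; resample only the
# training split so the test set keeps its original class distribution:
# from imblearn.over_sampling import SMOTE
# X_train, y_train = SMOTE(random_state=1).fit_resample(X_train, y_train)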

# Decision tree
#model = DecisionTreeClassifier(criterion='entropy')

# Random forest
#model=RandomForestClassifier(n_estimators=100)

# Support vector machine
#model=svm.SVC(kernel='linear', C=1, probability=True)

# K-nearest neighbors
model = KNeighborsClassifier(n_neighbors=10)

fitted = model.fit(X_train, y_train)       # fit() returns the fitted estimator
predictions = fitted.predict(X_test)       # predicted labels for the test set
y_probas = fitted.predict_proba(X_test)    # class probabilities (used for the ROC sketch below)

print(confusion_matrix(y_test, predictions))
print(accuracy_score(y_test, predictions))

# 10-fold cross-validated predictions over the full (normalized) dataset;
# the old sklearn.cross_validation module was removed, so cross_val_predict
# is imported from sklearn.model_selection above
predicted = cross_val_predict(model, X_normalized, Yn, cv=10)
print(accuracy_score(Yn, predicted))
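# Accuracy alone can hide per-class behaviour; as an optional sketch,
# sklearn's classification_report shows precision, recall and F1 per class
# for the same cross-validated predictions:
from sklearn.metrics import classification_report
print(classification_report(Yn, predicted))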

skplt.metrics.plot_confusion_matrix(y_test,
                                    predictions,
                                    title='Confusion matrix',
                                    figsize=(13, 13),
                                    cmap='Blues')
pyplot.show()
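# y_probas is computed above but never plotted; as an optional sketch,
# scikit-plot can draw per-class ROC curves from those probabilities:
skplt.metrics.plot_roc(y_test, y_probas, title='ROC curves', figsize=(13, 13))
pyplot.show()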