import numpy as np
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeRegressor
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import GradientBoostingClassifier


def KNN():

    # Fitting a K-Nearest Neighbors classifier to the Training set
    # (Minkowski metric with p=2 is standard Euclidean distance)
    classifier = KNeighborsClassifier(n_neighbors=5, metric='minkowski', p=2)
    classifier.fit(X_train, y_train)

    # Predicting the Test set results
    y_pred = classifier.predict(X_test).astype(int)

    return evaluate_Comparison("KNN", y_test, y_pred)


def decision_tree():

    # Fitting a Decision Tree regressor to the Training set
    regressor = DecisionTreeRegressor()
    regressor.fit(X_train, y_train)

    # Predicting the Test set results; the regressor outputs continuous
    # values, so threshold at 0.5 to obtain binary class labels
    y_predict = regressor.predict(X_test)
    y_pred = np.where(y_predict > 0.5, 1, 0)

    return evaluate_Comparison("Decision Tree", y_test, y_pred)


def logistic_training():

    # Fitting Logistic Regression to the Training set
    classifier = LogisticRegression()
    classifier.fit(X_train, y_train)

    # Predicting the Test set results; predict() already returns class
    # labels, so no 0.5 thresholding is needed
    y_pred = classifier.predict(X_test).astype(int)

    return evaluate_Comparison("Logistic Regression", y_test, y_pred)


def gradient_boosting():

    # Fitting a Gradient Boosting classifier to the Training set
    gbc = GradientBoostingClassifier(n_estimators=100,
                                     learning_rate=0.01,
                                     max_depth=3)
    gbc.fit(X_train, y_train)

    # Predicting the Test set results; predict() already returns class
    # labels, so no thresholding is needed
    y_pred = gbc.predict(X_test).astype(int)

    return evaluate_Comparison("Gradient Boosting", y_test, y_pred)


def GaussianMixture_model():
    from sklearn.mixture import GaussianMixture

    # Fitting a one-component Gaussian to the normal (y == 0) training
    # samples only, so that low likelihood on unseen data flags anomalies
    gmm = GaussianMixture(n_components=1)
    gmm.fit(X_train[y_train == 0])

    # Threshold: one standard deviation below the mean log-likelihood
    # of the normal training samples
    OKscore = gmm.score_samples(X_train[y_train == 0])
    threshold = OKscore.mean() - 1 * OKscore.std()

    # Test samples scoring below the threshold are predicted anomalous (1)
    score = gmm.score_samples(X_test)
    y_pred = np.where(score < threshold, 1, 0)

    return evaluate_Comparison("Gaussian Mixture", y_test, y_pred)