Example #1
def gradient_boosting():
    # shuffle must be enabled for random_state to take effect
    kfold = model_selection.StratifiedKFold(n_splits=10, shuffle=True,
                                            random_state=42)

    scores = []

    for train_index, test_index in kfold.split(X, y):
        # print("Train:", train_index, "Validation:", test_index)
        X_t, X_test = X[train_index], X[test_index]
        y_t, y_test = y[train_index], y[test_index]

        GSMOTE = EGSmote()
        X_train, y_train = GSMOTE.fit_resample(X_t, y_t)
        gbc = GradientBoostingClassifier(n_estimators=100,
                                         learning_rate=0.01,
                                         max_depth=3)
        gbc.fit(X_train, y_train)

        # Predicting the Test set results
        y_pred = gbc.predict(X_test)  # predict() already returns 0/1 class labels

        scores.append(evaluate2(y_test, y_pred))

    scores = np.asarray(scores)
    fscores = scores[:, 0]
    gmean = scores[:, 1]
    auc = scores[:, 2]

    return ["GBC", fscores.mean(), gmean.mean(), auc.mean()]
Example #2
def logistic_training():

    # shuffle must be enabled for random_state to take effect
    kfold = model_selection.StratifiedKFold(n_splits=10, shuffle=True,
                                            random_state=42)

    scores = []

    for train_index, test_index in kfold.split(X, y):
        # print("Train:", train_index, "Validation:", test_index)
        X_t, X_test = X[train_index], X[test_index]
        y_t, y_test = y[train_index], y[test_index]

        GSMOTE = EGSmote()
        X_train, y_train = GSMOTE.fit_resample(X_t, y_t)
        regressor = LogisticRegression(max_iter=120)
        regressor.fit(X_train, y_train)

        # Predicting the Test set results
        y_pred = regressor.predict(X_test)  # predict() already returns 0/1 class labels

        scores.append(evaluate2(y_test, y_pred))

    scores = np.asarray(scores)
    fscores = scores[:, 0]
    gmean = scores[:, 1]
    auc = scores[:, 2]

    return ["LR", fscores.mean(), gmean.mean(), auc.mean()]
Example #3
def KNN():

    # Evaluate KNN with stratified 10-fold cross-validation

    # shuffle must be enabled for random_state to take effect
    kfold = model_selection.StratifiedKFold(n_splits=10, shuffle=True,
                                            random_state=42)

    scores = []

    for train_index, test_index in kfold.split(X, y):
        # print("Train:", train_index, "Validation:", test_index)
        X_t, X_test = X[train_index], X[test_index]
        y_t, y_test = y[train_index], y[test_index]

        GSMOTE = EGSmote()
        X_train, y_train = GSMOTE.fit_resample(X_t, y_t)
        classifier = KNeighborsClassifier(n_neighbors=5,
                                          metric='minkowski',
                                          p=2)
        classifier.fit(X_train, y_train)

        # Predicting the Test set results
        y_pred = classifier.predict(X_test)

        scores.append(evaluate2(y_test, y_pred))

    scores = np.asarray(scores)
    fscores = scores[:, 0]
    gmean = scores[:, 1]
    auc = scores[:, 2]

    return ["KNN", fscores.mean(), gmean.mean(), auc.mean()]
Example #4
def KNN(X_train, y_train, X_test, y_test):

    classifier = KNeighborsClassifier(n_neighbors=5, metric='minkowski')
    classifier.fit(X_train, y_train)

    # Predicting the Test set results
    y_pred = classifier.predict(X_test)

    return evaluate2("KNN", y_test, y_pred)
Example #5
def gradient_boosting(X_train, y_train, X_test, y_test):

    gbc = GradientBoostingClassifier(n_estimators=100)
    gbc.fit(X_train, y_train)

    # Predicting the Test set results
    y_pred = gbc.predict(X_test)  # predict() already returns 0/1 class labels

    return evaluate2("GBC", y_test, y_pred)
Example #6
def decision_tree(X_train, y_train, X_test, y_test):

    regressor = DecisionTreeRegressor()
    regressor.fit(X_train, y_train)

    # Predicting the Test set results
    y_predict = regressor.predict(X_test)
    # the regressor outputs continuous values, so threshold at 0.5 to get labels
    y_pred = np.where(y_predict > 0.5, 1, 0)

    return evaluate2("DT", y_test, y_pred)
Example #7
def logistic_training(X_train, y_train, X_test, y_test):

    regressor = LogisticRegression()
    regressor.fit(X_train, y_train)

    # Predicting the Test set results
    y_pred = regressor.predict(X_test)  # predict() already returns 0/1 class labels

    return evaluate2("LR", y_test, y_pred)
Example #8
def logistic_training(X_train, y_train, X_test, y_test):

    regressor = LogisticRegression(max_iter=150, C=2, solver='liblinear')
    regressor.fit(X_train, y_train)

    # Predicting the Test set results
    y_pred = regressor.predict(X_test)  # predict() already returns 0/1 class labels
    cm = confusion_matrix(y_test, y_pred)
    print(cm)
    return evaluate2("LR", y_test, y_pred)
Example #9
def XGBoost(X_train, y_train, X_test, y_test):

    # Fitting X-Gradient boosting
    gbc = xgb.XGBClassifier(objective="binary:logistic", random_state=42)
    gbc.fit(X_train, y_train)

    # Predicting the Test set results
    y_pred = gbc.predict(X_test)  # predict() already returns 0/1 class labels

    return evaluate2("XGBoost", y_test, y_pred)
Example #10
def decision_tree(X_train, y_train, X_test, y_test):

    regressor = DecisionTreeClassifier(criterion="entropy",
                                       max_features="sqrt",  # "auto" (equivalent to "sqrt") was removed in scikit-learn 1.3
                                       min_samples_leaf=0.00005)
    regressor.fit(X_train, y_train)

    # Predicting the Test set results
    y_pred = regressor.predict(X_test)  # predict() already returns 0/1 class labels
    cm = confusion_matrix(y_test, y_pred)
    print(cm)
    return evaluate2("DT", y_test, y_pred)
Example #11
def XGBoost(X_train, y_train, X_test, y_test):

    # Fitting X-Gradient boosting with early stopping on the test-set AUC
    gbc = xgb.XGBClassifier(scale_pos_weight=99,
                            missing=999999,
                            max_depth=3,
                            colsample_bytree=0.8)
    eval_set = [(X_train, y_train), (X_test, y_test)]
    # note: in xgboost >= 2.0, eval_metric and early_stopping_rounds must be
    # passed to the XGBClassifier constructor instead of fit()
    gbc.fit(X_train,
            y_train,
            eval_metric="auc",
            eval_set=eval_set,
            verbose=True,
            early_stopping_rounds=5)

    # Predicting the Test set results
    y_pred = gbc.predict(X_test)  # predict() already returns 0/1 class labels
    # cm = confusion_matrix(y_test, y_pred)
    # print(cm)
    return evaluate2("XGBoost", y_test, y_pred)