# Example #1
# 0
def pima_models(x_train_p, y_train_p):
    """
    Train three classifiers on the (scaled) PIMA data plus a soft-voting ensemble.

    :param x_train_p: training features (DataFrame; column count sets the MLP input size)
    :param y_train_p: binary training labels
    :return: dict with keys 'mlp', 'rf' (sigmoid-calibrated random forest),
             'svm', and 'ensemble' (soft VotingClassifier over the three)
    """
    def create_mlp():
        # 60 -> 30 -> 1 MLP with dropout; sigmoid output for binary classification.
        mlp = Sequential()
        mlp.add(Dense(60, input_dim=len(x_train_p.columns), activation='relu'))
        mlp.add(Dropout(0.2))
        # input_dim is only needed on the first layer; Keras infers it here.
        mlp.add(Dense(30, activation='relu'))
        mlp.add(Dropout(0.2))
        mlp.add(Dense(1, activation='sigmoid'))
        mlp.compile(loss='binary_crossentropy',
                    optimizer='adam',
                    metrics=['accuracy'])
        return mlp

    mlp = KerasClassifier(build_fn=create_mlp,
                          epochs=100,
                          batch_size=64,
                          verbose=0)
    # VotingClassifier requires estimators to advertise themselves as classifiers;
    # KerasClassifier does not set this attribute itself.
    mlp._estimator_type = "classifier"
    mlp.fit(x_train_p, y_train_p)

    # Calibrate the random forest's probabilities with Platt scaling (sigmoid),
    # so its predict_proba output is usable for soft voting.
    # (The previous code also trained a second, uncalibrated 1000-tree forest
    # that was never used — removed.)
    sigmoidRF = CalibratedClassifierCV(
        RandomForestClassifier(n_estimators=1000), cv=5, method='sigmoid')
    sigmoidRF.fit(x_train_p, y_train_p)

    # probability=True is required for soft voting (enables predict_proba).
    svm = SVC(kernel='linear', probability=True)
    svm.fit(x_train_p, y_train_p)

    # Soft-voting ensemble: averages the calibrated class probabilities.
    estimators = [('rf', sigmoidRF), ('mlp', mlp), ('svm', svm)]
    ensemble = VotingClassifier(estimators, voting='soft')
    ensemble._estimator_type = "classifier"
    ensemble.fit(x_train_p, y_train_p)

    return {'mlp': mlp, 'rf': sigmoidRF, 'svm': svm, 'ensemble': ensemble}