Example #1
import matplotlib.pyplot as plt

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, BatchNormalization
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.optimizers import Adam

from sklearn.metrics import (classification_report, confusion_matrix,
                             mean_squared_error, r2_score)

input_shape = (X_train.shape[1],)

# Feed-forward classifier: ELU hidden layers with dropout and batch norm, softmax over 4 classes
m = Sequential([
    Dense(units = 5, activation = 'elu', input_shape = input_shape),
    Dropout(rate = 0.2),
    BatchNormalization(),
    Dense(units = 10, activation = 'elu'),
    Dense(units = 4, activation = 'softmax')
])

m.summary()

# Stop training once validation loss has not improved for 20 consecutive epochs
earlystopping = EarlyStopping(monitor = 'val_loss', patience = 20)

opt = Adam(learning_rate = 0.005)
m.compile(optimizer = opt, loss = 'sparse_categorical_crossentropy', metrics = ['accuracy'])

# Train with a 20% validation split; EarlyStopping can end training before the 500-epoch cap
history = m.fit(X_train, y_train_label_encoded, batch_size = 50, epochs = 500, validation_split = 0.2, callbacks = [earlystopping])

plt.plot(history.history['loss'], label='training_loss')
plt.plot(history.history['val_loss'], label='validation_loss')
plt.legend()

plt.figure()
plt.plot(history.history['accuracy'], label='training_accuracy')
plt.plot(history.history['val_accuracy'], label='validation_accuracy')
plt.legend()
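
The sklearn metrics imported above are not used in this snippet; a minimal evaluation sketch, assuming a held-out X_test / y_test_label_encoded split prepared the same way as the training data, could look like this:

import numpy as np

y_pred = np.argmax(m.predict(X_test), axis=1)
print(confusion_matrix(y_test_label_encoded, y_pred))
print(classification_report(y_test_label_encoded, y_pred))
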
Example #2
    # Training
    if useTF:
        if not (usePCA or useICA or useIsomap or useLLE):
            X_train21 = X_train21.to_numpy()
        y_train21CPY = y_train21
        y_train21 = y_train21.to_numpy()

        # Learn the label vocabulary and convert the targets to one-hot vectors
        # for the TF model's softmax output
        LE1 = preprocessing.LabelEncoder()
        LE1.fit(y_train21)
        OneHot1 = OneHotEncoder()
        y_train21 = OneHot1.fit_transform(y_train21.reshape(-1, 1)).toarray()

        model21.fit(X_train21, y_train21, validation_split=0.1, epochs=nEpochs)
        if iterationNumber == 1:
            model21.summary()
    else:
        model21.fit(X_train21, y_train21)
        if iterationNumber == 1:
            print(model21)
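
    # Possible follow-up (not in the original excerpt): map the TF model's softmax output
    # back to the original labels via the fitted LabelEncoder, assuming numpy is imported
    # as np and X_test21 is a held-out split prepared like X_train21.
    if useTF:
        predicted21 = LE1.inverse_transform(np.argmax(model21.predict(X_test21), axis=1))
        print(predicted21[:10])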

    # Game 2: Training
    X_train22 = trainGame2.loc[:, trainGame2.columns != "playerID"]
    y_train22 = trainGame2["playerID"]
    scaler22 = preprocessing.StandardScaler().fit(X_train22)
    X_train22 = pd.DataFrame(scaler22.transform(X_train22.values),
                             columns=X_train22.columns,
                             index=X_train22.index)

    # Models
    tree = DecisionTreeClassifier(criterion='entropy', random_state=0)
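
    # Possible next step (not part of the original excerpt): fit the tree on the scaled
    # Game 2 features and score it, assuming a held-out X_test22 / y_test22 split
    # transformed with scaler22 exists.
    tree.fit(X_train22, y_train22)
    print("Game 2 decision-tree accuracy:", tree.score(X_test22, y_test22))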