Example #1
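    # A hedged sketch, not part of the original snippet: the imports this truncated
    # example appears to rely on (the WideDeep module path is a guess).
    # import tensorflow as tf
    # from tensorflow.keras.losses import binary_crossentropy
    # from tensorflow.keras.optimizers import Adam
    # from tensorflow.keras.metrics import AUC
    # from tensorflow.keras.callbacks import EarlyStopping
    # from model import WideDeep  # hypothetical location of the WideDeep model class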
    # The snippet is cut off here: the surviving `test_size=test_size)` suggests a
    # dataset-loading / train-test split call above, e.g. a hypothetical
    # feature_columns, train, test = load_dataset(file, test_size=test_size)
    train_X, train_y = train
    test_X, test_y = test
    # ============================Build Model==========================
    model = WideDeep(feature_columns,
                     hidden_units=hidden_units,
                     dnn_dropout=dnn_dropout)
    model.summary()
    # ============================model checkpoint======================
    # check_path = '../save/wide_deep_weights.epoch_{epoch:04d}.val_loss_{val_loss:.4f}.ckpt'
    # checkpoint = tf.keras.callbacks.ModelCheckpoint(check_path, save_weights_only=True,
    #                                                 verbose=1, period=5)
    # ============================Compile============================
    model.compile(loss=binary_crossentropy,
                  optimizer=Adam(learning_rate=learning_rate),
                  metrics=[AUC()])
    # ==============================Fit==============================
    model.fit(
        train_X,
        train_y,
        epochs=epochs,
        callbacks=[
            EarlyStopping(monitor='val_loss',
                          patience=1,
                          restore_best_weights=True)
        ],  # checkpoint
        batch_size=batch_size,
        validation_split=0.1)
    # ===========================Test==============================
    print('test AUC: %f' % model.evaluate(test_X, test_y)[1])
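    # A hedged sketch, not in the original: re-enabling the commented-out checkpoint
    # alongside EarlyStopping. Note that the `period` argument shown above is
    # deprecated in newer tf.keras; save_freq (or save_best_only) is used instead.
    # checkpoint = tf.keras.callbacks.ModelCheckpoint(check_path,
    #                                                 save_weights_only=True,
    #                                                 save_best_only=True,
    #                                                 verbose=1)
    # model.fit(train_X, train_y, epochs=epochs, batch_size=batch_size,
    #           validation_split=0.1,
    #           callbacks=[EarlyStopping(monitor='val_loss', patience=1,
    #                                    restore_best_weights=True), checkpoint])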
Example #2
# The snippet is cut off here: the dangling arguments belong to an EarlyStopping
# callback defined above, presumably something along these lines (the monitored
# metric name is an assumption, implied by mode='max'):
# early_stopping = tf.keras.callbacks.EarlyStopping(monitor='val_auc',
#                                                   patience=10,
#                                                   mode='max',
#                                                   restore_best_weights=True)

model.fit(
    train_X,
    train_y,
    epochs=epochs,
    # callbacks=[early_stopping, checkpoint],
    batch_size=batch_size,
    validation_split=0.1,  # note: ignored here, validation_data below overrides it
    validation_data=(val_X, val_y),
    # class_weight={0:1, 1:3},  # class/sample balancing
)

print('test AUC: %f' % model.evaluate(test_X, test_y)[1])

# ------------- model predictions on the train / test sets ----

train_predictions_weighted = model.predict(train_X, batch_size=batch_size)
test_predictions_weighted = model.predict(test_X, batch_size=batch_size)

# ------------- confusion matrix
from sklearn.metrics import confusion_matrix, roc_curve
import matplotlib.pyplot as plt
import seaborn as sns


def plot_cm(labels, predictions, p=0.5):
    cm = confusion_matrix(labels, predictions > p)
    plt.figure(figsize=(5, 5))
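    # The example is truncated here; a minimal completion of plot_cm, assuming the
    # usual seaborn heatmap rendering of the confusion matrix, might look like this:
    sns.heatmap(cm, annot=True, fmt='d')
    plt.title('Confusion matrix @ threshold {:.2f}'.format(p))
    plt.ylabel('Actual label')
    plt.xlabel('Predicted label')


# Hypothetical usage with the predictions computed above:
# plot_cm(test_y, test_predictions_weighted)
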
Example #3
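    # (The example is truncated; feature_columns, train_X/train_y and test_X/test_y
    #  are assumed to have been prepared above, as in Example #1.)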
    # ============================Build Model==========================
    mirrored_strategy = tf.distribute.MirroredStrategy()
    with mirrored_strategy.scope():
        model = WideDeep(feature_columns,
                         hidden_units=hidden_units,
                         dnn_dropout=dnn_dropout)
        model.summary()
        # ============================Compile============================
        model.compile(loss=binary_crossentropy,
                      optimizer=Adam(learning_rate=learning_rate),
                      metrics=[AUC()])
    # ============================model checkpoint======================
    # check_path = '../save/wide_deep_weights.epoch_{epoch:04d}.val_loss_{val_loss:.4f}.ckpt'
    # checkpoint = tf.keras.callbacks.ModelCheckpoint(check_path, save_weights_only=True,
    #                                                 verbose=1, period=5)
    # ==============================Fit==============================
    model.fit(
        train_X,
        train_y,
        epochs=epochs,
        callbacks=[
            EarlyStopping(monitor='val_loss',
                          patience=2,
                          restore_best_weights=True)
        ],  # checkpoint
        batch_size=batch_size,
        validation_split=0.1)
    # ===========================Test==============================
    print('test AUC: %f' %
          model.evaluate(test_X, test_y, batch_size=batch_size)[1])
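    # A hedged note, not part of the original: under MirroredStrategy the model and
    # its compile() call belong inside strategy.scope(), as above, and the batch size
    # passed to fit() is the global batch size, split across all replicas. A common
    # pattern is to scale it with the replica count, e.g.:
    # global_batch_size = batch_size * mirrored_strategy.num_replicas_in_sync
    # model.fit(train_X, train_y, epochs=epochs, batch_size=global_batch_size,
    #           validation_split=0.1)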