Ejemplo n.º 1
0
def main():
    """Train the DIN WideDeep model: load the pickled dataset, fit with
    TensorBoard logging and per-epoch weight checkpoints."""
    hidden_unit = 64
    batch_size = 32
    learning_rate = 0.001
    epochs = 50
    # NOTE: pickle.load is unsafe on untrusted files; the dataset here is
    # assumed to be locally produced and trusted.
    with open('../Din/dataset/dataset.pkl', 'rb') as f:
        train_set = np.array(pickle.load(f))
        test_set = pickle.load(f)
        cate_list = pickle.load(f)
        user_count, item_count, cate_count, max_sl = pickle.load(f)
    train_user, train_item, train_hist, train_sl, train_y = input_data(
        train_set, max_sl)
    # TensorBoard: one run directory per launch, keyed by timestamp.
    current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    log_dir = 'logs/' + current_time
    # FIX: write_grads, embeddings_layer_names and embeddings_data were
    # removed from tf.keras.callbacks.TensorBoard in TF 2.x and make the
    # constructor raise; only the supported arguments are kept.
    tensorboard = tf.keras.callbacks.TensorBoard(log_dir=log_dir,
                                                 histogram_freq=1,
                                                 write_graph=True,
                                                 write_images=True,
                                                 embeddings_freq=0,
                                                 embeddings_metadata=None,
                                                 update_freq=500)
    # Model checkpoint: `period=1` is deprecated; save_freq='epoch' is the
    # supported equivalent (save once per epoch).
    check_path = 'save/wide_deep_weights.epoch_{epoch:04d}.val_loss_{val_loss:.4f}.ckpt'
    checkpoint = tf.keras.callbacks.ModelCheckpoint(check_path,
                                                    save_weights_only=True,
                                                    verbose=1,
                                                    save_freq='epoch')

    model = WideDeep(user_count, item_count, cate_count, cate_list,
                     hidden_unit)
    model.summary()
    optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
    model.compile(loss=tf.keras.losses.binary_crossentropy,
                  optimizer=optimizer,
                  metrics=[tf.keras.metrics.AUC()])
    # Inputs are a list of four aligned arrays; last 10% of the training
    # data is held out as the validation split.
    model.fit([train_user, train_item, train_hist, train_sl],
              train_y,
              epochs=epochs,
              batch_size=batch_size,
              validation_split=0.1,
              callbacks=[tensorboard, checkpoint])
Ejemplo n.º 2
0
model.compile(loss=tf.keras.losses.binary_crossentropy,
              optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate),
              metrics=METRICS)

# --------- early stopping -----
# Stop once val_auc has not improved for 10 epochs; restore the best weights.
early_stopping = tf.keras.callbacks.EarlyStopping(monitor='val_auc',
                                                  verbose=1,
                                                  patience=10,
                                                  mode='max',
                                                  restore_best_weights=True)

# FIX: the original passed both validation_split=0.1 and validation_data;
# validation_data silently overrides the split (newer Keras raises), so only
# the explicit validation set is kept. The early_stopping callback above was
# constructed but never used; it is now actually wired into fit().
model.fit(
    train_X,
    train_y,
    epochs=epochs,
    callbacks=[early_stopping],
    batch_size=batch_size,
    validation_data=(val_X, val_y),
    # class_weight={0:1, 1:3},  # optional class re-weighting for imbalance
)

# evaluate() returns [loss, metrics...]; index 1 is the first entry of METRICS.
print('test AUC: %f' % model.evaluate(test_X, test_y)[1])

# ------------- model evaluation on the test dataset ----

train_predictions_weighted = model.predict(train_X, batch_size=batch_size)
test_predictions_weighted = model.predict(test_X, batch_size=batch_size)

# ------------- confusion matrix
from sklearn.metrics import confusion_matrix, roc_curve
import matplotlib.pyplot as plt
Ejemplo n.º 3
0
                                                         test_size=test_size)
    # NOTE(review): this chunk begins mid-statement — the call closed on the
    # line above presumably is create_criteo_dataset(...); confirm against the
    # enclosing function, which is not visible here.
    # `train` / `test` unpack as (features, labels) pairs.
    train_X, train_y = train
    test_X, test_y = test
    # ============================Build Model==========================
    # WideDeep is a project-local model class; hidden_units and dnn_dropout
    # configure its deep tower.
    model = WideDeep(feature_columns,
                     hidden_units=hidden_units,
                     dnn_dropout=dnn_dropout)
    model.summary()
    # ============================model checkpoint======================
    # check_path = '../save/wide_deep_weights.epoch_{epoch:04d}.val_loss_{val_loss:.4f}.ckpt'
    # checkpoint = tf.keras.callbacks.ModelCheckpoint(check_path, save_weights_only=True,
    #                                                 verbose=1, period=5)
    # ============================Compile============================
    # Binary cross-entropy loss with AUC as the tracked metric.
    model.compile(loss=binary_crossentropy,
                  optimizer=Adam(learning_rate=learning_rate),
                  metrics=[AUC()])
    # ==============================Fit==============================
    model.fit(
        train_X,
        train_y,
        epochs=epochs,
        callbacks=[
            # patience=1: stop one epoch after val_loss stops improving and
            # roll back to the best weights seen.
            EarlyStopping(monitor='val_loss',
                          patience=1,
                          restore_best_weights=True)
        ],  # checkpoint
        batch_size=batch_size,
        validation_split=0.1)
    # ===========================Test==============================
    # evaluate() returns [loss, metrics...]; index 1 is the AUC compiled above.
    print('test AUC: %f' % model.evaluate(test_X, test_y)[1])
Ejemplo n.º 4
0
        # NOTE(review): the loop header is outside this chunk — presumably it
        # iterates column indices `i` of deep_input_feats; confirm upstream.
        # Each column is min-max scaled to [0, 1] with its own fresh scaler.
        input_mms = MinMaxScaler(feature_range=(0, 1))
        deep_input_feats.iloc[:, [i]] = input_mms.fit_transform(
            deep_input_feats.iloc[:, [i]])

# The wide features join the deep inputs as the final model-input list.
deep_input.append(wide_features.values)

# Halt training once the monitored loss stops improving by at least min_delta.
my_callbacks = [
    EarlyStopping(monitor='loss', min_delta=1e-5,
                  patience=3, verbose=1, mode='min')
]
model.fit(x=deep_input,
          y=final_tags,
          epochs=100,
          batch_size=64,
          callbacks=my_callbacks,
          verbose=1)

# Large batch for inference; predictions come back in the scaled target space.
pred = model.predict(deep_input, batch_size=2**14)

# Map predictions back to the original target scale.
pred_real = target_mms.inverse_transform(pred)

# Count negative/positive raw predictions and print details for positives.
s0, s1 = 0, 0
for i, x in enumerate(pred_real):
    is_positive = pred[i] > 0
    if is_positive:
        s1 += 1
        print(deep_features.iloc[i, 0], deep_features.iloc[i, -1],
              final_tags[i], pred[i])
    else:
        s0 += 1
Ejemplo n.º 5
0
    # ========================== Create dataset =======================
    # NOTE(review): this chunk is the interior of a training function whose
    # signature (file, embed_dim, read_part, sample_num, test_size,
    # learning_rate, epochs, batch_size, ...) sits outside the visible range.
    feature_columns, train, test = create_criteo_dataset(file=file,
                                                         embed_dim=embed_dim,
                                                         read_part=read_part,
                                                         sample_num=sample_num,
                                                         test_size=test_size)
    # `train` / `test` unpack as (features, labels) pairs.
    train_X, train_y = train
    test_X, test_y = test
    # ============================Build Model==========================
    model = WideDeep(feature_columns, hidden_units=hidden_units, dnn_dropout=dnn_dropout)
    model.summary()
    # ============================model checkpoint======================
    # check_path = '../save/wide_deep_weights.epoch_{epoch:04d}.val_loss_{val_loss:.4f}.ckpt'
    # checkpoint = tf.keras.callbacks.ModelCheckpoint(check_path, save_weights_only=True,
    #                                                 verbose=1, period=5)
    # ============================Compile============================
    # Binary cross-entropy loss; AUC tracked as the evaluation metric.
    model.compile(loss=binary_crossentropy, optimizer=Adam(learning_rate=learning_rate),
                  metrics=[AUC()])
    # ==============================Fit==============================
    # The last 10% of the training data is held out for validation each epoch.
    model.fit(
        train_X,
        train_y,
        epochs=epochs,
        # callbacks=[checkpoint],
        batch_size=batch_size,
        validation_split=0.1
    )
    # ===========================Test==============================
    # evaluate() returns [loss, metrics...]; index 1 is the AUC compiled above.
    print('test AUC: %f' % model.evaluate(test_X, test_y)[1])