Example no. 1
0
def main():
    """Train a WideDeep model on the pickled DIN dataset.

    Loads the dataset, builds the model, and fits it with TensorBoard
    logging and per-epoch weight checkpointing.
    """
    hidden_units = 64
    n_epochs = 50
    lr = 0.001
    batch = 32

    # The pickle file holds four objects written sequentially:
    # train set, test set, category list, and the count/max-length tuple.
    with open('../Din/dataset/dataset.pkl', 'rb') as f:
        train_set = np.array(pickle.load(f))
        test_set = pickle.load(f)
        cate_list = pickle.load(f)
        user_count, item_count, cate_count, max_sl = pickle.load(f)

    train_user, train_item, train_hist, train_sl, train_y = input_data(
        train_set, max_sl)

    # TensorBoard: one timestamped log directory per run.
    run_stamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    tensorboard = tf.keras.callbacks.TensorBoard(
        log_dir='logs/' + run_stamp,
        histogram_freq=1,
        write_graph=True,
        write_grads=False,
        write_images=True,
        embeddings_freq=0,
        embeddings_layer_names=None,
        embeddings_metadata=None,
        embeddings_data=None,
        update_freq=500)

    # Checkpoint the weights every epoch; the filename encodes the epoch
    # number and the validation loss.
    checkpoint = tf.keras.callbacks.ModelCheckpoint(
        'save/wide_deep_weights.epoch_{epoch:04d}.val_loss_{val_loss:.4f}.ckpt',
        save_weights_only=True,
        verbose=1,
        period=1)

    model = WideDeep(user_count, item_count, cate_count, cate_list,
                     hidden_units)
    model.summary()
    model.compile(loss=tf.keras.losses.binary_crossentropy,
                  optimizer=tf.keras.optimizers.Adam(learning_rate=lr),
                  metrics=[tf.keras.metrics.AUC()])
    model.fit([train_user, train_item, train_hist, train_sl],
              train_y,
              epochs=n_epochs,
              batch_size=batch,
              validation_split=0.1,
              callbacks=[tensorboard, checkpoint])
Example no. 2
0
# ---------------- model checkpoint ----------------
# Save weights every 5 epochs; the filename encodes epoch and validation loss.
# Fix: use {epoch:04d} (zero-padded) instead of {epoch:4d}, which space-pads
# and produces paths containing spaces (e.g. 'epoch_   5') that also don't
# sort lexicographically.
check_path = './save/deepfm_weight.epoch_{epoch:04d}.val_loss_{val_loss:.4f}.ckpt'
checkpoint = tf.keras.callbacks.ModelCheckpoint(check_path,
                                                save_weights_only=True,
                                                verbose=1,
                                                period=5)

# ------------ model evaluate ------------
# Metrics reported during training and evaluation.
METRICS = [
    tf.keras.metrics.BinaryAccuracy(name='accuracy'),
    tf.keras.metrics.Precision(name='precision'),
    tf.keras.metrics.Recall(name='recall'),
    tf.keras.metrics.AUC(name='auc'),
]

model.compile(loss=tf.keras.losses.binary_crossentropy,
              optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate),
              metrics=METRICS)

# --------- early stopping ---------
# Stop once val_auc has not improved for 10 epochs, keeping the best weights.
early_stopping = tf.keras.callbacks.EarlyStopping(monitor='val_auc',
                                                  verbose=1,
                                                  patience=10,
                                                  mode='max',
                                                  restore_best_weights=True)

model.fit(
    train_X,
    train_y,
    epochs=epochs,
    # callbacks=[early_stopping, checkpoint],
    batch_size=batch_size,
Example no. 3
0
                                                      sample_num=sample_num,
                                                      test_size=test_size)
 # Unpack the (features, labels) pairs produced by the split above.
 train_X, train_y = train
 test_X, test_y = test
 # ============================Build Model==========================
 model = WideDeep(feature_columns,
                  hidden_units=hidden_units,
                  dnn_dropout=dnn_dropout)
 model.summary()
 # ============================model checkpoint======================
 # check_path = '../save/wide_deep_weights.epoch_{epoch:04d}.val_loss_{val_loss:.4f}.ckpt'
 # checkpoint = tf.keras.callbacks.ModelCheckpoint(check_path, save_weights_only=True,
 #                                                 verbose=1, period=5)
 # ============================Compile============================
 model.compile(loss=binary_crossentropy,
               optimizer=Adam(learning_rate=learning_rate),
               metrics=[AUC()])
 # ==============================Fit==============================
 # Stop as soon as val_loss fails to improve for one epoch, keeping
 # the best weights seen so far.
 fit_callbacks = [
     EarlyStopping(monitor='val_loss',
                   patience=1,
                   restore_best_weights=True)
 ]  # checkpoint
 model.fit(train_X,
           train_y,
           epochs=epochs,
           callbacks=fit_callbacks,
           batch_size=batch_size,
           validation_split=0.1)
 # ===========================Test==============================
 # evaluate() returns [loss, auc]; report the AUC on the held-out set.
 test_auc = model.evaluate(test_X, test_y)[1]
 print('test AUC: %f' % test_auc)
Example no. 4
0
import pickle
from sklearn.metrics import log_loss, roc_auc_score
import tensorflow as tf
from builtins import int
# from loss import auc

# Instantiate the project-defined WideDeep model (architecture lives elsewhere).
model = WideDeep()


def auc(y_true, y_pred):
    """Keras metric: ROC AUC computed by sklearn's roc_auc_score via a TF py-op.

    NOTE(review): tf.py_func is TF1-only — it was removed in TF2 in favour of
    tf.py_function / tf.numpy_function; confirm the TF version before reuse.
    Also, roc_auc_score raises ValueError when a batch contains only one class.
    """
    return tf.py_func(roc_auc_score, (y_true, y_pred), tf.double)


# Earlier regression-style compile kept for reference:
#model.compile(optimizer='rmsprop', loss=losses.mse, metrics=["mse"],)
# Track the custom sklearn-backed AUC alongside plain accuracy.
compile_metrics = [auc, 'accuracy']
model.compile(optimizer="adam",
              loss="binary_crossentropy",
              metrics=compile_metrics)
#
# wide_features=pd.read_csv('data/path_matrix.txt', sep='  ', header=None)
# deep_features=pd.read_csv('data/sns_dense.csv', sep=',', header=0)
#
# for index, row in wide_features.iterrows():
#     if not np.any(row):
#         row[:]=8
#         print('normalize')
#
# maxinterval=deep_features['interval'].max()
# print(maxinterval)
# for index, row in deep_features.iterrows():
#     if row['interval'] == 0:
#         row['interval'] = maxinterval