# Exemple #1
# 0
def get_callbacks(name, data_tr, data_val):
    """Assemble the training callbacks, all keyed off validation ROC AUC.

    Args:
        name: file path where ModelCheckpoint saves the best model.
        data_tr: training data handed to the ROC-AUC callback.
        data_val: validation data handed to the ROC-AUC callback.

    Returns:
        List of callbacks in the order [roc_callback, ModelCheckpoint,
        EarlyStopping].
    """
    auc_cb = roc_callback(data_tr, data_val)
    # Keep only the checkpoint with the best validation AUC seen so far.
    checkpoint_cb = ModelCheckpoint(
        name, save_best_only=True, monitor='val_roc_auc', mode='max')
    # Stop once val AUC fails to improve by >= 0.001 for 3 epochs.
    stop_cb = EarlyStopping(
        monitor='val_roc_auc', min_delta=0.001, patience=3, mode='max')
    return [auc_cb, checkpoint_cb, stop_cb]
# Build the deep branch: an LSTM feature extractor over the sequence input,
# regularised with dropout, projected down to a 4-unit sigmoid embedding.
# Assumes train_X_lstm is (samples, timesteps, features) — TODO confirm.
input_deep = Input(shape=(train_X_lstm.shape[1], train_X_lstm.shape[2]))
deep_layer1 = LSTM(32)(input_deep)
hidden = Dropout(0.2)(deep_layer1)
hidden = Dense(16, activation='relu')(hidden)
hidden = Dropout(0.2)(hidden)
deep = Dense(4, activation='sigmoid')(hidden)

# Fuse the wide and deep branches into a single sigmoid prediction head.
merged = concatenate([wide, deep])
out = Dense(1, activation='sigmoid')(merged)
model = Model(inputs=[input_wide, input_deep], outputs=out)
# Loss alternatives considered here: mean_squared_error vs binary_crossentropy.
model.compile(optimizer='adam',
              loss='mean_squared_error',
              metrics=['accuracy', utils.rmse])
# Train the two-input (wide MLR features + LSTM sequences) model, tracking
# ROC AUC on the held-out split and logging to TensorBoard.
train_inputs = [train_X_mlr, train_X_lstm]
test_inputs = [test_X_mlr, test_X_lstm]
auc_cb = utils.roc_callback(training_data=[train_inputs, y],
                            validation_data=[test_inputs, test_y])
tb_cb = TensorBoard(log_dir='final/{}'.format("mlr_4_lstm_32-16-4_d0.2_gbdt"))
model.fit(train_inputs, y, epochs=100, batch_size=10, callbacks=[auc_cb, tb_cb])

# Report test-set metrics as percentages.
loss, accuracy, rmse = model.evaluate(test_inputs, test_y)
print('Accuracy: %.2f %%' % (accuracy * 100))
print('RMSE: %.2f %%' % (rmse * 100))
print('Loss: %.2f %%' % (loss * 100))
# Exemple #3
# 0
# Width of the wide part: number of parallel LR-style units.
wide_m = 12
input_wide = Input(shape=(X.shape[1], ))
# Second layer is an LR branch plus a weighting branch, both with
# l2-regularised biases: a softmax gate element-wise multiplied with a
# sigmoid fit branch, then summed by the custom Lambda layer.
gate = Dense(wide_m,
             activation='softmax',
             bias_regularizer=regularizers.l2(0.01))(input_wide)
fit_branch = Dense(wide_m,
                   activation='sigmoid',
                   bias_regularizer=regularizers.l2(0.01))(input_wide)
gated = multiply([gate, fit_branch])
out = Lambda(keras_sum_layer,
             output_shape=keras_sum_layer_output_shape)(gated)
model = Model(inputs=input_wide, outputs=out)
model.compile(optimizer='adam',
              loss='mean_squared_error',
              metrics=['accuracy'])
# Train on the full set; the same data doubles as "validation" for the
# ROC-AUC callback here — NOTE(review): no held-out split in this example.
auc_cb = utils.roc_callback(training_data=[X, y], validation_data=[X, y])
model.fit(X, y, epochs=10, batch_size=2, callbacks=[auc_cb])

# Persist the full model (architecture + weights + optimizer state).
model.save(MODEL_PATH)
# Alternative persistence: architecture as JSON, weights separately.
# model_json = model.to_json()
# with open('model.json', 'w') as file:
#     file.write(model_json)
# model.save_weights('model.json.h5')
print("训练完毕")
# Exemple #4
# 0
from keras.layers import LSTM, Dropout
from keras.callbacks import TensorBoard
import preprocess as preprocess
import utils as utils
from keras import regularizers

# Load the preprocessed, LSTM-ready splits. Train comes from an explicit
# CSV; test uses get_LSTM_data's default path — TODO confirm that default.
train_X, train_y = preprocess.get_LSTM_data(path="./process_train.csv")
test_X, test_y = preprocess.get_LSTM_data()

# Single-layer LSTM binary classifier with dropout regularisation.
# Assumes train_X is (samples, timesteps, features) — TODO confirm.
model = Sequential([
    LSTM(24, input_shape=(train_X.shape[1], train_X.shape[2])),
    Dropout(0.3),
    Dense(1, activation='sigmoid'),
])
model.compile(loss='mean_squared_error',
              optimizer='adam',
              metrics=['accuracy', utils.rmse])
# Train with ROC-AUC tracking on the test split and TensorBoard logging.
cbs = [
    utils.roc_callback(training_data=[train_X, train_y],
                       validation_data=[test_X, test_y]),
    TensorBoard(log_dir='final/{}'.format("lstm_mse_nol2_24_0.2")),
]
model.fit(train_X, train_y, epochs=100, batch_size=10, callbacks=cbs)

# Report test-set metrics as percentages.
loss, accuracy, rmse = model.evaluate(test_X, test_y)
print('Accuracy: %.2f %%' % (accuracy * 100))
print('RMSE: %.2f %%' % (rmse * 100))
print('Loss: %.2f %%' % (loss * 100))
# Exemple #5
# 0
# Deep tower: two ReLU layers with l2-regularised biases stacked on the
# (externally defined) deep_layer1 features.
deep_layer2 = Dense(128,
                    activation='relu',
                    bias_regularizer=regularizers.l2(0.01))(deep_layer1)
deep = Dense(64, activation='relu',
             bias_regularizer=regularizers.l2(0.01))(deep_layer2)

# Combine the deep & wide towers into a single sigmoid prediction head.
# NOTE(review): removed unused `X_in = pd.concat([X, X], axis=1)` — the
# model is fed [X, X] directly at fit time, so that concat was dead code.
coned = concatenate([wide, deep])
out = Dense(1, activation='sigmoid')(coned)
model = Model(inputs=[input_wide, input_deep], outputs=out)
model.compile(optimizer='adam',
              loss='mean_squared_error',
              metrics=['accuracy', utils.rmse])

# Train the two-input model, feeding the same feature matrix to both the
# wide and deep inputs, with ROC-AUC tracking and TensorBoard logging.
# NOTE(review): removed unused `test_X_in = pd.concat([test_X, test_X],
# axis=1)` — evaluation feeds [test_X, test_X] directly, so it was dead code.
model.fit([X, X],
          y,
          epochs=100,
          batch_size=10,
          callbacks=[
              utils.roc_callback(training_data=[[X, X], y],
                                 validation_data=[[test_X, test_X], test_y]),
              TensorBoard(log_dir='final/{}'.format("deep_wide"))
          ])

# Report test-set metrics as percentages.
loss, accuracy, rmse = model.evaluate([test_X, test_X], test_y)
print('Accuracy: %.2f %%' % (accuracy * 100))
print('RMSE: %.2f %%' % (rmse * 100))
print('Loss: %.2f %%' % (loss * 100))