#model = MLP(features_sizes, deep_layers=(256, 256), k=256)  # with the smaller batch (1024) the LR does not need to be lowered; keep it at 1e-3
valid_score = model.fit(train[features], valid[features], y_train, y_valid, lr=0.001, N_EPOCH=100, batch_size=1024, early_stopping_rounds=15)
#model = DeepFM(features_sizes, deep_layers=(256, 256), k=256)
#model = NFM(features_sizes, k=256)
#model = AFM(features_sizes, k=256, attention_FM=256)
#model = AFM(features_sizes, k=256, attention_FM=8, dropout_keeprate=0.9, lambda_l2=0.001)
#model = MLP(features_sizes, deep_layers=(1,), k=256)
#model = AutoInt(features_sizes, k=8)
valid_score = model.fit(train[features], valid[features], y_train, y_valid, lr=0.001, N_EPOCH=100, batch_size=4096, early_stopping_rounds=15)

y_pred = model.predict(test[features]).reshape((-1))
predictions_bounded = np.maximum(y_pred, np.ones(len(y_pred)) * -1)  # bound the lower values
predictions_bounded = np.minimum(predictions_bounded, np.ones(len(y_pred)) * 1)  # bound the higher values
test_loss = np.sqrt(np.mean(np.square(y_test.reshape(predictions_bounded.shape) - predictions_bounded)))
print("Protocol Test Score:", test_loss)
ls.append(test_loss)
if _ != Rounds - 1:
#model = CFM(features_sizes, loss_type='binary', metric_type='auc')
#model = MLR(features_sizes, loss_type='binary', metric_type='auc', MLR_m=16)
#model = MFM(features_sizes, k=8, loss_type='binary', metric_type='auc', MFM_m=2)
best_score = model.fit(X_train[cate_features], X_valid[cate_features], y_train, y_valid, lr=0.0005, N_EPOCH=50, batch_size=500, early_stopping_rounds=3)  # lr 0.0005 -> 0.001 (1e-3 with batch_size=1000)

SUBMIT = False
if SUBMIT:
    y_pred = model.predict(test[cate_features])
    y_pred = 1. / (1. + np.exp(-1. * y_pred))  # sigmoid: map raw scores to probabilities
    sample_submission['isFraud'] = y_pred
    #sample_submission.to_csv(data_path+'sub/sub01_LR_F49_timeSF_0.8154.csv', index=False)
    #sample_submission.to_csv(data_path+'sub/sub05_MLR_m=15_nosig_F49_timeSF_0.8154.csv', index=False)

# Score log (local validation score; KG = Kaggle leaderboard):
# LR: 0.8774, KG: 0.8261
# TIMESF (time-based split):
#   LGB: 0.8442@90, KG=0.8549
#   CAT (with cat_fea):
#     depth=6: 0.8471
#     depth=8: 0.8674, KG=0.8597
#   LR: 0.8154@11, KG=0.8468
#   FM: 0.8220@3, KG=0.8384 | FM k=6: 0.8225 | FM k=4: 0.8109 | k=3: 0.8133
#   MLP(256,128,64): 0.8066@1, KG= | (64,64): 0.8022@1
#   BiFM: 0.8136 (very slow when there are many fields)
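
# The binary models above are built with metric_type='auc', and the SUBMIT block maps raw
# scores through a sigmoid before writing probabilities. Below is a minimal, illustrative
# sketch (not part of the original script) of computing the local validation AUC from the
# same kind of scores. It assumes scikit-learn is available and that `model`,
# `X_valid[cate_features]`, and `y_valid` exist as in the code above; the helper name
# `local_auc` is hypothetical, not repo API.
import numpy as np
from sklearn.metrics import roc_auc_score

def local_auc(model, X_valid_cate, y_valid):
    """Score the validation set with a fitted model and return its ROC AUC."""
    raw = np.asarray(model.predict(X_valid_cate)).reshape((-1))
    prob = 1. / (1. + np.exp(-raw))  # same sigmoid as in the SUBMIT block above
    # AUC is rank-based, so the monotone sigmoid does not change its value; it is applied
    # here only to keep scores on the same scale as the submitted probabilities.
    return roc_auc_score(y_valid, prob)

# Example usage:
# print("valid AUC:", local_auc(model, X_valid[cate_features], y_valid))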