# Beispiel #1 — snippet-aggregator marker (commented out; was bare text, not valid Python)
# 0  (vote count from the scrape)
plt.savefig('3_fold_CV_Reg_Param.png')

# 3-fold CV results: mean test RMSE as a function of the number of latent factors.
plt.figure(figsize=(20, 12))
plt.rcParams.update({'font.size': 12})
plt.plot(Train_CV.cv_results['param_n_factors'],
         Train_CV.cv_results['mean_test_rmse'], '.k')
plt.xlabel('Number of Factors')  # fixed label typo: 'Factores' -> 'Factors'
plt.ylabel('RMSE')
plt.grid()
plt.title('3-Fold CV - Number of Factors')
plt.savefig('3_fold_CV_Factors.png')

# %% Best Hyper-parameters Training
# Re-train a single SVD model on the full training set with the best
# hyper-parameters the grid search found (scored on RMSE).
alg = SVD()

best = Grid_Search_Result.best_params['rmse']
alg.biased = best['biased']
alg.n_epochs = best['n_epochs']
alg.n_factors = best['n_factors']
# In Surprise, the 'reg_all' constructor argument fans out to ALL four
# regularization terms (reg_pu, reg_qi, reg_bu, reg_bi). The CV models were
# built that way, so mirror it on every per-term attribute — originally only
# reg_pu/reg_qi were set and the bias terms kept their 0.02 default.
alg.reg_pu = best['reg_all']
alg.reg_qi = best['reg_all']
alg.reg_bu = best['reg_all']
alg.reg_bi = best['reg_all']
# Same for 'lr_all': it also drives the bias learning rates.
alg.lr_pu = best['lr_all']
alg.lr_qi = best['lr_all']
alg.lr_bu = best['lr_all']
alg.lr_bi = best['lr_all']

alg.fit(data_train.build_full_trainset())

# %% Loading Test Data
# The sample-submission file defines the (user, item) pairs that must be
# predicted; load_data_desired presumably parses it into a Surprise-compatible
# dataset — TODO confirm against utils.
file_path = "Data/sample_submission.csv"
data_test = utils.load_data_desired(file_path)

# %% Prediction
Predict_Test = []

# Configure the NMF model before the final fit on the full training set.
alg_NMF.verbose = True
alg_NMF.reg_qi = 0.1

# Time the full-trainset fit.
start = time.time()
alg_NMF.fit(data_train.build_full_trainset())
end = time.time()

elapsed = end - start
print("***********************************************")
print("Exe time:")
print(elapsed)

# %% Best Hyper-parameters Training - SVD
alg_SVD = SVD()

# Apply the hand-picked best hyper-parameters to the SVD model.
for attr, val in [('biased', True), ('n_epochs', 50), ('n_factors', 35),
                  ('reg_pu', 0.1), ('reg_qi', 0.1), ('verbose', True)]:
    setattr(alg_SVD, attr, val)

# Time the fit on the full training set.
start = time.time()
alg_SVD.fit(data_train.build_full_trainset())
end = time.time()

print("***********************************************")
print("Exe time:")
print(end - start)
# Dump the CV summary (best parameters and full results) to the report file.
separator = "************************************************************ \n"
file.writelines([
    separator,
    "+ Best Param: \n \n",
    str(Train_CV.best_params) + "\n \n",
    separator,
    "+ CV Summary: \n \n",
    str(Train_CV.cv_results) + "\n \n",
    separator,
])

file.close()

# *****************************************************************************
# %% Best Hyper-parameters Training:
# Training over whole training dataset, using best hyper-parameters
alg = SVD()

best = Train_CV.best_params['rmse']
alg.biased = best['biased']
alg.n_epochs = best['n_epochs']
alg.n_factors = best['n_factors']
alg.reg_pu = best['reg_pu']
alg.reg_qi = best['reg_qi']
alg.reg_bu = best['reg_bu']
alg.reg_bi = best['reg_bi']
# The CV tuned a single 'lr_all', which Surprise's constructor applies to
# every learning rate — including the biases. Set lr_bu/lr_bi too; originally
# they were left at the 0.005 default and the tuned model was not reproduced.
alg.lr_pu = best['lr_all']
alg.lr_qi = best['lr_all']
alg.lr_bu = best['lr_all']
alg.lr_bi = best['lr_all']
alg.verbose = True
alg.random_state = 0

alg.fit(data_train.build_full_trainset())

# *****************************************************************************
# %% Loading Test Data
# Beispiel #4 — snippet-aggregator marker (commented out; was bare text, not valid Python)
# 0  (vote count from the scrape)
# # Train Algorithms

# Based on each gridsearch, we apply the same parameters for each algorithms on
# sample test set to get individual predictions.

# ## SVD

# In[ ]:

# SVD with baselines
#
# BUG FIX: in Surprise, `reg_all` and `lr_all` are CONSTRUCTOR conveniences
# that fan out to reg_pu/reg_qi/reg_bu/reg_bi (and the lr_* equivalents)
# inside __init__. Assigning them as plain attributes after construction
# (`algo.reg_all = 0.1`) has NO effect — fit() only reads the per-term
# attributes, so the model silently trained with the defaults
# (reg 0.02, lr 0.005). Pass everything to the constructor instead.
algo = SVD(
    n_factors=400,
    n_epochs=500,
    biased=True,
    lr_all=0.01,
    reg_all=0.1,
    random_state=seed,
    verbose=False,
)

print("Training SVD...")
algo.fit(trainset)

print("Computing predictions for SVD... \n")
test_predictions_svd = algo.test(
    testset)  # Get real predictions to append to big final matrix

# In[ ]:

# Convert the list of Prediction namedtuples to a NumPy array for stacking.
test_predictions_svd = np.asarray(test_predictions_svd)