# num_eval += 1 is_relevant = np.array([ False, False, False, False, False, False, False, False, False, False ]) #if True in is_relevant[:3]: #goodUsers.append(user) num_eval += 1 cumulative_precision += eval.precision(is_relevant, relevant_items) cumulative_recall += eval.recall(is_relevant, relevant_items) cumulative_MAP += eval.MAP(is_relevant, relevant_items) cumulative_precision /= num_eval cumulative_recall /= num_eval cumulative_MAP /= num_eval result_dict = { "precision": cumulative_precision, "recall": cumulative_recall, "MAP": cumulative_MAP } print(result_dict) with open("../../../Outputs/LightFM_topPop_1_9600_all.csv", 'w') as f: f.write("user_id,item_list\n") for user_id in target_users: f.write( str(user_id) + "," + utils.trim(np.array(recommender.recommend(user_id))) + "\n")
# NOTE(review): mangled fragment — begins mid-function: the `if`/`for`/`def` headers
# that precede this `elif` are outside the visible chunk, so the nesting depth below
# is reconstructed and must be confirmed against the full file.
                outputs[u].append(recommendations.get(key).recommend(u))
            # Branch taken when both targets are negative — presumably "age/region
            # unknown"; TODO confirm the semantics of negative target values.
            elif target_age < 0 and target_region < 0:
                if u in users_dict[0]:
                    outputs[u].append(recommendations.get(0).recommend(u))
                else:
                    outputs[u].append(recommendations.get(-1).recommend(u))
                    # print(u)
    print("DONE!")
    # Cache the computed recommendations so later runs can load them instead of
    # recomputing (see the pickle.load further down).
    with open('../../Dataset/topPopularRecommendations.p', 'wb') as fp:
        pickle.dump(outputs, fp, protocol=pickle.HIGHEST_PROTOCOL)
    return outputs


# output = create_recommendations()
# exit()

# Load the cached per-user recommendation lists produced above.
with open('../../Dataset/topPopularRecommendations.p', 'rb') as fp:
    output = pickle.load(fp)

f = open("../../Outputs/TopPop_freeze.csv", 'w')
f.write("user_id,item_list\n")
for u in utils.get_target_users("../../Dataset/target_users_freeze.csv",seek=8):
    if u in output.keys():
        if len(output[u])==1:
            # Exactly one cached list for this user: emit it directly.
            f.write(str(u) + "," + utils.trim(output[u][0]) + "\n")
        else:
            # print('user want MIXER')
            # Several cached lists: select1 (defined elsewhere in the file) picks or
            # merges among them — behavior assumed, TODO confirm.
            f.write(str(u) + "," + utils.trim(select1(output[u])) + "\n")
    else:
        print("WE HAVE A PROBLEM {0} IS NOT HERE".format(u))
# NOTE(review): mangled fragment — the opening of the `run(...)` function and of the
# dict literal closed by this `}` are outside the visible chunk; indentation reconstructed.
    }
    return result_dict


URM_test = sps.csr_matrix(sps.load_npz("../Dataset/URM/data_test.npz"))
# URM_val = sps.csr_matrix(sps.load_npz("../Dataset/old/data_validation.npz"))
targetUsers = util.get_target_users("../Dataset/target_users.csv", seek=9)

print("TESTING")
res_test = run(targetUsers, URM_test)
print(res_test["MAP"])

# Disabled validation run, kept verbatim as a block-comment string (the URM_val it
# needs is also commented out above).
"""
print(res_test)
print("VALIDATION")
res_val = run(targetUsers, URM_val)
print(res_val["MAP"])
#print(res_val)
print((res_val["MAP"]+res_test["MAP"])/2)
"""

# Write the submission CSV from the per-user recommendation lists that run() stored
# under res_test["RecSys"].
with open("../Outputs/hybrid_norm_15_1_1.csv", 'w') as f:
    f.write("user_id,item_list\n")
    for user_id in targetUsers:
        # print(user_id)
        f.write(
            str(user_id) + "," +
            util.trim(np.array(res_test["RecSys"][user_id])) + "\n")

# Compare the produced file against the reference CSV ../Outputs/truth.csv.
util.compare_csv("../Outputs/truth.csv", "../Outputs/hybrid_norm_15_1_1.csv")
score[j] += 1 else: score[j] = 1 max = 0 #print(score) rec = list() rec.append(list()) for w in sorted(score, key=score.get, reverse=True): if max > 9: break rec[0].append(w) max += 1 count += 1 #print("user {0} in more than 1 cluster".format(u)) #print(rec) f.write(str(u) + "," + utils.trim(np.array(rec[0])) + "\n") print(count) """ pyplot.plot(MAP_pureSVD_per_group, label="pureSVD") pyplot.plot(MAP_sslim_per_group, label="sslim") pyplot.plot(MAP_CFItem_per_group, label="CFItem") pyplot.plot(MAP_slim_per_group, label="slim") pyplot.plot(MAP_CFUser_per_group, label="CFUser") pyplot.plot(MAP_CBItem_per_group, label="CBItem") pyplot.plot(MAP_P3a_per_group, label="P3a") pyplot.plot(MAP_P3b_per_group, label="P3b") pyplot.plot(MAP_NMF_per_group, label="elasticNet") pyplot.ylabel('MAP') pyplot.xlabel('User Group')
scores[user_profile] = -np.inf return scores itemColl = ItemKNNCFRecommender(URM_all) itemColl.fit(shrink=50, topK=10) elasticNet = SLIMElasticNetRecommender(URM_all) elasticNet.fit() users = utils.get_target_users("../../Dataset/target_users.csv") hybridrecommender = ItemKNNScoresHybridRecommender(URM_all, itemColl, elasticNet) hybridrecommender.fit(0.3) with open("../../../Outputs/itemColl+elasticNet_0.3.csv", 'w') as f: f.write("user_id,item_list\n") for user_id in users: f.write(str(user_id) + ", " + utils.trim(hybridrecommender.recommend(user_id)[:10]) + "\n") hybridrecommender.fit(0.5) with open("../../../Outputs/itemColl+elasticNet_0.5.csv", 'w') as f: f.write("user_id,item_list\n") for user_id in users: f.write(str(user_id) + ", " + utils.trim(hybridrecommender.recommend(user_id)[:10]) + "\n") hybridrecommender.fit(0.7) with open("../../../Outputs/itemColl+elasticNet_0.7.csv", 'w') as f: f.write("user_id,item_list\n") for user_id in users: f.write(str(user_id) + ", " + utils.trim(hybridrecommender.recommend(user_id)[:10]) + "\n")
l=2.299, n=True) with open("../../Outputs/hybrid_norm_final2.csv", 'w') as f: f.write("user_id,item_list\n") user_batch_start = 0 while user_batch_start < len(users): user_batch_end = user_batch_start + 1000 user_batch_end = min(user_batch_end, len(users)) test_user_batch_array = np.array( users[user_batch_start:user_batch_end]) user_batch_start = user_batch_end # Compute predictions for a batch of users using vectorization, much more efficient than computing it one at a time recommended_items_batch_list, scores_batch = rec_sys.recommend( test_user_batch_array, remove_seen_flag=True, cutoff=10, remove_top_pop_flag=False, remove_custom_items_flag=False, return_scores=True) i = 0 for user_id in test_user_batch_array: # print(user_id) f.write( str(user_id) + "," + utils.trim(np.array(recommended_items_batch_list[i])) + "\n") i = i + 1