# Thompson sampling baseline: uses the per-item mean/variance (ret_mu_Sigma_item)
# from the factorisation model returned by example().
def test(num_feature=30, delta=1, lamda_p=2):
    bmf_model = example()
    rating_matrix = bmf_model.matrix
    user_feature = bmf_model.user_features
    print 'user_feature.shape', len(user_feature)
    # required parameters
    beta_item = bmf_model.beta_item
    N = bmf_model.num_item
    # item mean and variance
    mu_Sigma_items = bmf_model.ret_mu_Sigma_item
    # split data to training & validation
    num_user, num_item, ratings = build_ml_len()
    matrix = build_rating_matrix(num_user, num_item, ratings)
    train_matrix = train_rating120(matrix)
    test = train_matrix[0]
    # build (user, item, rating) triples for the first user's row; unrated
    # items are kept as all-zero rows
    ratings = []
    for item in xrange(len(test)):
        rating = [0 for x in xrange(3)]
        if test[item] != 0:
            rating[1] = item
            rating[2] = test[item]
        rating = np.array(rating)
        ratings.append(rating)
    ratings = np.array(ratings)
    ts_model = ThompsonSampling(rating_matrix, user_feature, num_feature,
                                mu_Sigma_items, delta, lamda_p, ratings)
    result = ts_model._estimate(ratings)
    thompson_sampling_file = open('../result/ret_thompson_sampling_test', 'w')
    for item in result:
        print >> thompson_sampling_file, item
    thompson_sampling_file.close()
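# The (user, item, rating) triple construction above is repeated in several of
# these test scripts. Below is a minimal sketch of a shared helper, assuming
# numpy is imported as np; the name build_test_ratings is hypothetical and not
# part of the original code base. It mirrors the loop above exactly: every item
# of the chosen user's row becomes a [0, item, rating] triple, and unrated
# items stay as all-zero rows.
def build_test_ratings(test_row):
    ratings = []
    for item in xrange(len(test_row)):
        rating = [0, 0, 0]
        if test_row[item] != 0:
            rating[1] = item
            rating[2] = test_row[item]
        ratings.append(np.array(rating))
    return np.array(ratings)

# Usage inside the scripts would then reduce to:
# ratings = build_test_ratings(train_matrix[0])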
# Linear UCB with user clustering; cluster parameters are initialised via
# _init_parameters_cluster_user(20, num_user) before estimation.
def test(num_feature=30, delta=1, lamda_p=2, alpha=1.8):
    bmf_model = example()
    rating_matrix = bmf_model.matrix
    user_feature = bmf_model.user_features
    print 'user_feature.shape', len(user_feature)
    # required parameters
    beta_item = bmf_model.beta_item
    N = bmf_model.num_item
    # item features from MAP solution
    item_features = bmf_model.item_features
    # split data to training & validation
    num_user, num_item, ratings = build_ml_len()
    matrix = build_rating_matrix(num_user, num_item, ratings)
    train_matrix = train_rating120(matrix)
    test = train_matrix[0]
    ratings = []
    for item in xrange(len(test)):
        rating = [0 for x in xrange(3)]
        if test[item] != 0:
            rating[1] = item
            rating[2] = test[item]
        rating = np.array(rating)
        ratings.append(rating)
    ratings = np.array(ratings)
    linearUCB_cluster_model = LinearUCBCluster(rating_matrix, user_feature, num_feature,
                                               item_features, delta, lamda_p, alpha, ratings)
    linearUCB_cluster_model._init_parameters_cluster_user(20, num_user)
    result = linearUCB_cluster_model._estimate(ratings)
    cluster_linearUCB_file = open('../result/ret_cluster_Linear_UCB_test', 'w')
    for item in result:
        cluster_linearUCB_file.write(str(item) + '\n')
    cluster_linearUCB_file.close()
# Epsilon-greedy baseline using the MAP item features; output path kept as in
# the original script.
def test(num_feature=30, delta=1, lamda_p=2, alpha=1.8):
    bmf_model = example()
    rating_matrix = bmf_model.matrix
    user_feature = bmf_model.user_features
    print 'user_feature.shape', len(user_feature)
    # required parameters
    beta_item = bmf_model.beta_item
    N = bmf_model.num_item
    # item features from MAP solution
    item_features = bmf_model.item_features
    # split data to training & validation
    num_user, num_item, ratings = build_ml_len()
    matrix = build_rating_matrix(num_user, num_item, ratings)
    train_matrix = train_rating120(matrix)
    test = train_matrix[0]
    ratings = []
    for item in xrange(len(test)):
        rating = [0 for x in xrange(3)]
        if test[item] != 0:
            rating[1] = item
            rating[2] = test[item]
        rating = np.array(rating)
        ratings.append(rating)
    ratings = np.array(ratings)
    eps_greedy_model = EpsilonGreedy(rating_matrix, user_feature, num_feature,
                                     item_features, delta, lamda_p, alpha, ratings)
    result = eps_greedy_model._estimate(ratings)
    eps_greedy_file = open('../result/ret_linear_UCB_test', 'w')
    for item in result:
        print >> eps_greedy_file, item
    eps_greedy_file.close()
def draw_sparsity_user():
    num_user, num_item, ratings = build_ml_len()
    matrix = build_rating_matrix(num_user, num_item, ratings)
    print matrix.shape
    x = np.arange(num_item)
    plt.plot(x, matrix[6], 'k.')
    plt.axis([0, 1000, 1, 6])
    plt.show()
# Linear UCB with item dependence; item features are read back from a
# previously cached CSV (see the commented-out export below).
def test(num_feature=30, delta=1, lamda_p=2, alpha=1.8):
    bmf_model = example()
    rating_matrix = bmf_model.matrix
    user_feature = bmf_model.user_features
    print 'user_feature.shape', len(user_feature)
    # required parameters
    beta_item = bmf_model.beta_item
    N = bmf_model.num_item
    # item features from MAP solution
    # item_features = bmf_model.item_features
    #
    # mat = np.matrix(item_features)
    # dataframe = pd.DataFrame(data=mat.astype(float))
    # dataframe.to_csv('../result/item_feature.csv', sep=',', header=False,
    #                  float_format='%.2f', index=False)
    df = pd.read_csv('../result/item_feature.csv', sep=',', header=None)
    item_features = np.array(df)
    # split data to training & validation
    num_user, num_item, ratings = build_ml_len()
    matrix = build_rating_matrix(num_user, num_item, ratings)
    train_matrix = train_rating120(matrix)
    # test = train_matrix[0]
    # ratings = []
    # for item in xrange(len(test)):
    #     rating = [0 for x in xrange(3)]
    #     if test[item] != 0:
    #         rating[1] = item
    #         rating[2] = test[item]
    #     rating = np.array(rating)
    #     ratings.append(rating)
    # ratings = np.array(ratings)
    UCB_Dependence_model = LinearUCBDependence(rating_matrix, user_feature, num_feature,
                                               item_features, delta, lamda_p, alpha, ratings)
    result = UCB_Dependence_model._estimate(ratings)
    # write results with a separate file handle so it no longer shadows the model variable
    UCB_dependence_file = open('../result/ret_linear_UCB_dependence', 'w')
    for item in result:
        print >> UCB_dependence_file, item
    UCB_dependence_file.close()
# Thompson sampling with user clustering; runs on the full rating set (the
# single-user triple construction is left commented out).
def test(num_feature=30, delta=1, lamda_p=2):
    bmf_model = example()
    rating_matrix = bmf_model.matrix
    user_feature = bmf_model.user_features
    print 'user_feature.shape', len(user_feature)
    # required parameters
    beta_item = bmf_model.beta_item
    N = bmf_model.num_item
    # item mean and variance
    mu_Sigma_items = bmf_model.ret_mu_Sigma_item
    num_user, num_item, ratings = build_ml_len()
    matrix = build_rating_matrix(num_user, num_item, ratings)
    # train_matrix = train_rating120(matrix)
    # test = train_matrix[0]
    # ratings = []
    # for item in xrange(len(test)):
    #     rating = [0 for x in xrange(3)]
    #     if test[item] != 0:
    #         rating[1] = item
    #         rating[2] = test[item]
    #     rating = np.array(rating)
    #     ratings.append(rating)
    # ratings = np.array(ratings)
    ts_model = ThompsonSamplingCluster(rating_matrix, user_feature, num_feature,
                                       mu_Sigma_items, delta, lamda_p, ratings)
    ts_model._init_parameters_cluster_user(20, num_user)
    result = ts_model._estimate(ratings)
    thompson_sampling_file = open('../result/ret_cluster_thompson_sampling', 'w')
    for item in result:
        thompson_sampling_file.write(str(item) + '\n')
    thompson_sampling_file.close()
# Random sampling baseline on the shuffled rating list.
def test():
    num_user, num_item, ratings = build_ml_len()
    np.random.shuffle(ratings)
    matrix = build_rating_matrix(num_user, num_item, ratings)
    train_matrix = train_rating120(matrix)
    test = train_matrix[0]
    # ratings = []
    # for item in xrange(len(test)):
    #     rating = [0 for x in xrange(3)]
    #     if test[item] != 0:
    #         rating[1] = item
    #         rating[2] = test[item]
    #     rating = np.array(rating)
    #     ratings.append(rating)
    # ratings = np.array(ratings)
    random_model = RandomSampling(matrix, ratings)
    result = random_model._estimate(ratings, num_item)
    random_sampling_file = open('../result/ret_random_sampling', 'w')
    for item in result:
        print >> random_sampling_file, item
    random_sampling_file.close()
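# Sketch of how the result files written by the scripts above could be loaded
# for a side-by-side comparison. Not part of the original code base: it assumes
# matplotlib is imported as plt (as in draw_sparsity_user) and that each result
# file holds one numeric value per line, which only holds if the _estimate()
# results are scalars.
def plot_results(paths=('../result/ret_thompson_sampling_test',
                        '../result/ret_random_sampling')):
    for path in paths:
        with open(path) as result_file:
            values = [float(line) for line in result_file if line.strip()]
        plt.plot(values, label=path.split('/')[-1])
    plt.legend()
    plt.show()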