def main(args):
    """Evaluate a neighborhood-based (top-k, no-bias) recommender.

    Loads the dataset, trains the rating predictor on the train split,
    then prints RMSE, MAE, and precision/recall/F1 at the requested
    recommendation-list size.

    Args:
        args: Parsed CLI arguments providing ``dataset``,
            ``neighborhood_size`` (k nearest neighbors), and
            ``recommended_list_size`` (cutoff for precision/recall).
    """
    dataset = args.dataset
    neighborhood_size = args.neighborhood_size
    recommended_list_size = args.recommended_list_size

    data_loader = DataLoader(dataset)
    data_loader.load_data()
    user_number, item_number = data_loader.get_dataset_info()
    train, test = data_loader.train_test_split()

    recommender = RecommenderSystem()
    rating_predictions = recommender.predict_topk_nobias(train,
                                                         k=neighborhood_size)

    evaluator = RecommenderEvaluator()
    print("RMSE={}".format(evaluator.rmse(rating_predictions, test)))
    print("MAE={}".format(evaluator.mae(rating_predictions, test)))

    # Per-user mean of the observed (non-zero) test ratings; passed to
    # precision_recall_at_k, presumably as the relevance threshold —
    # TODO confirm against the evaluator's implementation.
    mean_test = np.true_divide(test.sum(1), (test != 0).sum(1))
    precisions, recalls = evaluator.precision_recall_at_k(
        rating_predictions, test, mean_test, user_number,
        recommended_list_size)

    # Macro-average across users; sum(d.values()) replaces the redundant
    # generator expressions of the original.
    precision = sum(precisions.values()) / len(precisions)
    recall = sum(recalls.values()) / len(recalls)
    f1 = evaluator.f1(precision, recall)
    print("Precision({})={}".format(recommended_list_size, precision))
    print("Recall({})={}".format(recommended_list_size, recall))
    print("F1({})={}".format(recommended_list_size, f1))
# Example #2
# 0
def main(args):
    """Train and evaluate a TF-based recommender model.

    Supported model types: 'latent-factor-model',
    'deep-neural-network-model', 'ensemble-no-transfer-learning', and
    (default) an ensemble with transfer learning. After training, prints
    RMSE, MAE, and precision/recall/F1 at k=20 on the held-out split.

    Args:
        args: Parsed CLI arguments providing ``dataset``, ``model``,
            ``layers`` (Python-list literal string, e.g. "[64, 32]"),
            ``epochs``, ``max_checkout_without_progress``,
            ``batch_size``, ``dimension``, ``learning_rate``,
            ``optimizer`` ('Adam', 'RMSProp', or anything else for SGD),
            ``dropout_rate``, and ``regularization_factor``.
    """
    import ast  # local: only needed here to parse the --layers literal

    dataset = args.dataset
    model_type = args.model
    layers = []
    if model_type != 'latent-factor-model':
        # SECURITY: literal_eval safely parses the layer-list literal
        # without executing arbitrary code, unlike the original eval().
        layers = ast.literal_eval(args.layers)
    n_epoch = args.epochs
    max_checkout_without_progress = args.max_checkout_without_progress
    batch_size = args.batch_size
    dimension = args.dimension
    learning_rate = args.learning_rate

    # Dispatch table instead of an if/elif chain; unknown names fall
    # back to plain gradient descent, matching the original behavior.
    optimizer = {
        'Adam': tf.train.AdamOptimizer,
        'RMSProp': tf.train.RMSPropOptimizer,
    }.get(args.optimizer, tf.train.GradientDescentOptimizer)

    dropout_rate = args.dropout_rate
    regularization_factor = args.regularization_factor

    data_loader = DataLoader(dataset)
    data_loader.load_data()
    user_number, item_number = data_loader.get_dataset_info()
    rating_data_train, rating_data_test = data_loader.train_test_split(0.8)

    iter_train = ShuffleIterator([
        rating_data_train["userid"], rating_data_train["itemid"],
        rating_data_train["rating"]
    ],
                                 batch_size=batch_size)

    # MovieLens 100k/1m use lowercase column names; other datasets
    # (e.g. ml-20m style) use camelCase.
    if dataset in ('ml-100k', 'ml-1m'):
        userid = "userid"
        itemid = "itemid"
    else:
        userid = "userId"
        itemid = "movieId"

    user_ids_test, item_ids_test, ratings_test = data_loader.get_test_data([
        rating_data_test[userid], rating_data_test[itemid],
        rating_data_test["rating"]
    ])
    model_name = model_type + '-' + dataset

    # Positional arguments shared by every model constructor.
    base_args = (batch_size, dimension, learning_rate, user_number,
                 item_number, iter_train, dropout_rate)
    if model_type == 'latent-factor-model':
        model = LatentFactorModel(*base_args,
                                  optimizer_class=optimizer,
                                  reg_factor=regularization_factor)
    elif model_type == 'deep-neural-network-model':
        model = DeepNeuralNetworkModel(*base_args,
                                       layers=layers,
                                       optimizer_class=optimizer,
                                       reg_factor=regularization_factor)
    elif model_type == 'ensemble-no-transfer-learning':
        # NOTE(review): the last layer is dropped for ensembles —
        # presumably the ensemble adds its own output layer; confirm
        # against EnsembleModel.
        model = EnsembleModel(*base_args,
                              layers=layers[:-1],
                              optimizer_class=optimizer,
                              reg_factor=regularization_factor)
    else:
        # Default: ensemble with transfer learning enabled.
        model = EnsembleModel(*base_args,
                              layers=layers[:-1],
                              optimizer_class=optimizer,
                              reg_factor=regularization_factor,
                              transfer_learning=True)

    model.fit(user_ids_test,
              item_ids_test,
              ratings_test,
              rating_data_train,
              model_name,
              dataset,
              n_epoch=n_epoch,
              max_checkout_without_progress=max_checkout_without_progress)

    predicted_ratings = model.get_test_data_prediction()
    evaluator = RecommenderEvaluator(rating_data_test, predicted_ratings,
                                     dataset)
    print("\nRMSE={}".format(evaluator.rmse()))
    print("MAE={}".format(evaluator.mae()))

    k = 20  # fixed recommendation-list cutoff for ranking metrics
    precisions, recalls = evaluator.precision_recall_at_k(k)
    # Macro-average across users.
    precision = sum(precisions.values()) / len(precisions)
    recall = sum(recalls.values()) / len(recalls)
    f1 = evaluator.f1(precision, recall)
    print("Precision({})={}".format(k, precision))
    print("Recall({})={}".format(k, recall))
    print("F1({})={}".format(k, f1))