n_components=10,
    n_tastes=3,
    user_repr_graph=NormalizedLinearRepresentationGraph(),
    attention_graph=None,
    loss_graph=BalancedWMRBLossGraph(),
)

# Attention-enabled variant: same hyperparameters as the baseline model,
# plus a linear attention graph over the user tastes.
model_with_attention = TensorRec(n_components=10,
                                 n_tastes=3,
                                 attention_graph=LinearRepresentationGraph(),
                                 user_repr_graph=NormalizedLinearRepresentationGraph(),
                                 loss_graph=BalancedWMRBLossGraph())

# Fit and evaluate both models on the same features and interaction sets so
# the only difference between the two result packets is the attention graph.
results_without_attention = fit_and_eval(model=model_without_attention,
                                         user_features=user_features,
                                         item_features=item_features,
                                         train_interactions=train_interactions,
                                         test_interactions=test_interactions,
                                         fit_kwargs=fit_kwargs)
results_with_attention = fit_and_eval(model=model_with_attention,
                                      user_features=user_features,
                                      item_features=item_features,
                                      train_interactions=train_interactions,
                                      test_interactions=test_interactions,
                                      fit_kwargs=fit_kwargs)

# Use lazy %-style logging args: the message is only formatted when the
# INFO level is actually enabled (idiomatic for the logging module).
logging.info("Results without attention: %s", results_without_attention)
logging.info("Results with attention:    %s", results_with_attention)
# Exemple #2
# 0
                       CosineSimilarityPredictionGraph,
                       EuclidianSimilarityPredictionGraph):
        for repr_graph in (LinearRepresentationGraph, ReLURepresentationGraph):
            for n_tastes in (1, 3):

                # Build the model, fit, and get a result packet
                model = TensorRec(
                    n_components=n_components,
                    n_tastes=n_tastes,
                    biased=biased,
                    loss_graph=loss_graph(),
                    prediction_graph=pred_graph(),
                    user_repr_graph=NormalizedLinearRepresentationGraph(),
                    item_repr_graph=repr_graph())
                result = fit_and_eval(model, user_features, item_features,
                                      train_interactions, test_interactions,
                                      fit_kwargs)

                # Build results row for this configuration
                res_string = "{}".format(loss_graph.__name__)
                res_string = append_to_string_at_point(res_string,
                                                       pred_graph.__name__, 30)
                res_string = append_to_string_at_point(res_string,
                                                       repr_graph.__name__, 66)
                res_string = append_to_string_at_point(res_string, biased, 98)
                res_string = append_to_string_at_point(res_string, n_tastes,
                                                       108)
                res_string = append_to_string_at_point(
                    res_string, ": {}".format(result[0]), 118)
                res_string = append_to_string_at_point(res_string, result[1],
                                                       141)
# Exemple #3
# 0
                          user_repr_graph=user_repr(),
                          loss_graph=BalancedWMRBLossGraph(),
                          biased=False)

        # Fit the model and get a result packet
        fit_kwargs = {
            'epochs': 500,
            'learning_rate': .01,
            'n_sampled_items': 100,
            'verbose': True
        }
        result = fit_and_eval(model,
                              user_features,
                              item_features,
                              train_interactions,
                              test_interactions,
                              fit_kwargs,
                              recall_k=100,
                              precision_k=100,
                              ndcg_k=100)

        # Build results row for this configuration
        res_string = "{}".format(user_repr.__name__)
        res_string = append_to_string_at_point(res_string, item_repr.__name__,
                                               40)
        res_string = append_to_string_at_point(res_string,
                                               ": {:0.4f}".format(result[3]),
                                               68)
        res_string = append_to_string_at_point(res_string,
                                               "{:0.4f}".format(result[0]), 90)
        res_string = append_to_string_at_point(res_string,
        ]


# Try different configurations using DeepRepresentationGraph for both item_repr and user_repr. If
# DeepRepresentationGraph is used, a deep neural network will learn to represent the users or items.
for user_repr in (NormalizedLinearRepresentationGraph, DeepRepresentationGraph):
    for item_repr in (LinearRepresentationGraph, DeepRepresentationGraph):
        # Bias-free 20-component model for this user/item repr combination.
        model = TensorRec(n_components=20,
                          item_repr_graph=item_repr(),
                          user_repr_graph=user_repr(),
                          loss_graph=BalancedWMRBLossGraph(),
                          biased=False)

        # Fit the model and get a result packet
        fit_kwargs = {'epochs': 500, 'learning_rate': .01, 'n_sampled_items': 100, 'verbose': True}
        result = fit_and_eval(model, user_features, item_features, train_interactions, test_interactions, fit_kwargs,
                              recall_k=100, precision_k=100, ndcg_k=100)

        # Build the results row: append each metric at its fixed column offset.
        # NOTE(review): the index order (3, 0, 4, 1, 5, 2) presumably interleaves
        # two metric sets from the result packet — confirm against fit_and_eval.
        res_string = "{}".format(user_repr.__name__)
        columns = ((item_repr.__name__, 40),
                   (": {:0.4f}".format(result[3]), 68),
                   ("{:0.4f}".format(result[0]), 90),
                   ("{:0.4f}".format(result[4]), 110),
                   ("{:0.4f}".format(result[1]), 130),
                   ("{:0.4f}".format(result[5]), 150),
                   ("{:0.4f}".format(result[2]), 170))
        for value, column in columns:
            res_string = append_to_string_at_point(res_string, value, column)
        logging.info(header)
        logging.info(res_string)
        result_strings.append(res_string)

# Log the final results of all models
# Exemple #5
# 0
    n_components=10,
    n_tastes=3,
    user_repr_graph=NormalizedLinearRepresentationGraph(),
    attention_graph=None,
    loss_graph=BalancedWMRBLossGraph(),
)

# Attention-enabled variant: identical hyperparameters to the baseline,
# with a linear attention graph added over the user tastes.
model_with_attention = TensorRec(n_components=10,
                                 n_tastes=3,
                                 attention_graph=LinearRepresentationGraph(),
                                 user_repr_graph=NormalizedLinearRepresentationGraph(),
                                 loss_graph=BalancedWMRBLossGraph())

# Evaluate both models on identical data so the result packets differ only
# by the presence of the attention graph.
results_without_attention = fit_and_eval(model=model_without_attention,
                                         user_features=user_features,
                                         item_features=item_features,
                                         train_interactions=train_interactions,
                                         test_interactions=test_interactions,
                                         fit_kwargs=fit_kwargs)
results_with_attention = fit_and_eval(model=model_with_attention,
                                      user_features=user_features,
                                      item_features=item_features,
                                      train_interactions=train_interactions,
                                      test_interactions=test_interactions,
                                      fit_kwargs=fit_kwargs)

# Lazy %-style args defer formatting until the log level is known enabled
# (idiomatic for the logging module).
logging.info("Results without attention: %s", results_without_attention)
logging.info("Results with attention:    %s", results_with_attention)
# Iterate through many possibilities for model configuration
for loss_graph in (WMRBLossGraph, BalancedWMRBLossGraph):
    for pred_graph in (DotProductPredictionGraph, CosineSimilarityPredictionGraph,
                       EuclidianSimilarityPredictionGraph):
        for repr_graph in (LinearRepresentationGraph, ReLURepresentationGraph):
            for n_tastes in (1, 3):

                # Build the model, fit, and get a result packet
                model = TensorRec(
                    n_components=n_components,
                    n_tastes=n_tastes,
                    biased=biased,
                    loss_graph=loss_graph(),
                    prediction_graph=pred_graph(),
                    user_repr_graph=NormalizedLinearRepresentationGraph(),
                    item_repr_graph=repr_graph(),
                )
                result = fit_and_eval(model, user_features, item_features,
                                      train_interactions, test_interactions,
                                      fit_kwargs)

                # Build the results row for this configuration by placing
                # each column value at its fixed character offset.
                row = "{}".format(loss_graph.__name__)
                for value, column in ((pred_graph.__name__, 30),
                                      (repr_graph.__name__, 66),
                                      (biased, 98),
                                      (n_tastes, 108),
                                      (": {}".format(result[0]), 118),
                                      (result[1], 141),
                                      (result[2], 164)):
                    row = append_to_string_at_point(row, value, column)
                res_strings.append(row)
                print(row)

# Visual separator before the final dump of all collected result rows.
print('--------------------------------------------------')
for res_string in res_strings: