Example #1
import pickle
from typing import Type

import numpy as np
import pytest

# BaseRecommender, Evaluator and the interaction matrices X_train / X_test
# are assumed to come from the surrounding test module.


def test_recs(RecommenderClass: Type[BaseRecommender]) -> None:
    """Test that the recommender's learning exits normally and that it is picklable.

    Args:
        RecommenderClass (Type[BaseRecommender]): The recommender class to be tested.
    """
    rec = RecommenderClass(X_train)
    rec.learn()

    # Score every user in the training matrix.
    scores = rec.get_score(np.arange(X_train.shape[0]))
    evaluator = Evaluator(X_test, 0, 20)
    with pytest.raises(ValueError):
        evaluator.get_score(rec)
    metrics = evaluator.get_scores(rec, cutoffs=[X_train.shape[1]])
    # np.isfinite already rules out NaNs, so a single check suffices.
    assert np.all(np.isfinite(scores))
    for value in metrics.values():
        assert np.isfinite(value)
    with open("temp.pkl", "wb") as ofs:
        pickle.dump(rec, ofs)
    with open("temp.pkl", "rb") as ifs:
        rec_dumped: BaseRecommender = pickle.load(ifs)
    score_from_dumped = rec_dumped.get_score(np.arange(X_train.shape[0]))
    np.testing.assert_allclose(scores, score_from_dumped)
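
Because test_recs receives the recommender class as an argument, it is naturally driven by pytest's parametrize mechanism. A minimal sketch of that wiring follows; the concrete recommender classes named in the list are assumptions, not part of the original snippet.

# Sketch: run test_recs once per recommender class. The classes listed are
# placeholders; substitute the BaseRecommender subclasses your module imports.
@pytest.mark.parametrize("RecommenderClass", [TopPopRecommender, P3alphaRecommender])
def test_recs_all(RecommenderClass: Type[BaseRecommender]) -> None:
    test_recs(RecommenderClass)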
Example #2
    # X_train_all, X_train_val_all, valid_evaluator, test_evaluator and the
    # test_results list are assumed to be prepared earlier in the script;
    # json, List, Tuple and Type must also be imported at module level.
    # Each entry pairs an optimizer class with its number of search trials.
    test_configs: List[Tuple[Type[BaseOptimizer], int]] = [
        (TopPopOptimizer, 1),
        (CosineKNNOptimizer, 40),
        (AsymmetricCosineKNNOptimizer, 40),
        (TverskyIndexKNNOptimizer, 40),
        (DenseSLIMOptimizer, 20),
        (P3alphaOptimizer, 40),
        (RP3betaOptimizer, 40),
        (IALSOptimizer, 40),
        # (BPRFMOptimizer, 40),
        # (MultVAEOptimizer, 5),
        # (SLIMOptimizer, 40),
    ]
    for optimizer_class, n_trials in test_configs:
        name = optimizer_class.__name__
        optimizer: BaseOptimizer = optimizer_class(X_train_all, valid_evaluator)
        # Tune hyperparameters against the validation evaluator, stopping
        # after n_trials trials or a 4-hour (14400 s) timeout.
        best_param, validation_results = optimizer.optimize(
            timeout=14400, n_trials=n_trials
        )
        validation_results.to_csv(f"{name}_validation_scores.csv")
        # Refit the corresponding recommender on train + validation data
        # with the best parameters found.
        test_recommender = optimizer.recommender_class(X_train_val_all, **best_param)
        test_recommender.learn()
        test_scores = test_evaluator.get_scores(test_recommender, [5, 10, 20])

        test_results.append(dict(name=name, best_param=best_param, **test_scores))
        # Rewrite the results file after every optimizer so partial results
        # survive an interrupted run.
        with open("test_results.json", "w") as ofs:
            json.dump(test_results, ofs, indent=2)
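
Each record appended to test_results.json combines the optimizer's name, its best hyperparameters, and the test metrics at cutoffs 5, 10, and 20. A small follow-up sketch for inspecting the accumulated results; the use of pandas here is an assumption, not part of the original script.

# Sketch: load the results written by the loop above and compare
# recommenders side by side. Metric column names depend on whatever
# test_evaluator.get_scores returns.
import json

import pandas as pd

with open("test_results.json") as ifs:
    results = json.load(ifs)

df = pd.DataFrame(results).set_index("name")
print(df.drop(columns=["best_param"]).round(4))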