def test_objective(self):
        """Objective must be non-decreasing as training iterations grow.

        CLiMF maximizes its smoothed-MRR objective, so training longer
        (1 -> 10 -> 30 iterations) should never lower the computed loss.
        """
        from orangecontrib.recommendation.ranking.climf import compute_loss

        data = Orange.data.Table(__dataset__)
        learner = CLiMFLearner(num_factors=10, random_state=42, verbose=0)

        # Train with progressively more iterations, recording the objective.
        losses = []
        for num_iter in (1, 10, 30):
            learner.num_iter = num_iter
            model = learner(data)
            losses.append(
                compute_loss(data, (model.U, model.V), learner.lmbda))

        # Every objective value must be >= the one computed before it.
        self.assertTrue(
            all(earlier <= later
                for earlier, later in zip(losses, losses[1:])))
    def test_objective(self):
        """Objective must be non-decreasing as training iterations grow.

        NOTE(review): this method name is defined twice in the file; this
        later definition shadows the earlier identical one.
        """
        from orangecontrib.recommendation.ranking.climf import compute_loss

        data = Orange.data.Table(__dataset__)
        learner = CLiMFLearner(num_factors=10, random_state=42, verbose=0)

        objectives = []
        for step in [1, 10, 30]:
            learner.num_iter = step
            recommender = learner(data)

            # Evaluate the objective for the model trained so far.
            low_rank_matrices = (recommender.U, recommender.V)
            objectives.append(
                compute_loss(data, low_rank_matrices, learner.lmbda))

        # Adjacent pairs must never decrease (CLiMF maximizes its objective).
        pairwise_ok = [a <= b for a, b in zip(objectives, objectives[1:])]
        self.assertTrue(all(pairwise_ok))
    def test_input_data_continuous(self, *args):
        """Run the inherited continuous-input check once per SGD optimizer."""
        learner = CLiMFLearner(num_factors=2, num_iter=1, verbose=3,
                               callback=lambda x: None)

        for optimizer in __optimizers__:
            # Swap the optimizer in place and re-run the base-class check.
            learner.optimizer = optimizer
            print(learner.optimizer)
            super().test_input_data_continuous(learner, filename=__dataset__)
    def test_input_data_continuous(self, *args):
        """Continuous-input check repeated for every SGD optimizer.

        NOTE(review): duplicates the previous definition of the same name
        and shadows it at class-creation time.
        """
        learner = CLiMFLearner(num_factors=2,
                               num_iter=1,
                               verbose=3,
                               callback=lambda x: None)

        for opt in __optimizers__:
            learner.optimizer = opt
            print(learner.optimizer)  # trace which optimizer is under test
            super().test_input_data_continuous(learner, filename=__dataset__)
    def test_outputs(self):
        """The U and V tables of a trained model must share the same width."""
        data = Orange.data.Table(__dataset__)

        # Fit a minimal model; its quality is irrelevant for a shape check.
        learner = CLiMFLearner(num_factors=2, num_iter=1)
        recommender = learner(data)

        u_table = recommender.getUTable()
        v_table = recommender.getVTable()

        # A one-element set proves both column counts are identical.
        widths = {u_table.X.shape[1], v_table.X.shape[1]}
        self.assertEqual(len(widths), 1)
 def test_mrr2(self):
     """MRR check on the epinions train/test split."""
     model = CLiMFLearner(num_factors=10, num_iter=10, verbose=3)
     super().test_mrr(model, filename='epinions_train.tab',
                      testdata='epinions_test.tab')
 def test_mrr(self, *args):
     """Basic MRR sanity check on the default dataset."""
     tiny_model = CLiMFLearner(num_factors=2, num_iter=1, verbose=0)
     super().test_mrr(tiny_model, filename=__dataset__)
 def test_divergence(self, *args):
     """An absurdly large learning rate must trip the divergence check."""
     diverging = CLiMFLearner(num_factors=2, num_iter=1, learning_rate=1e20)
     super().test_divergence(diverging, filename=__dataset__)
 def test_warnings(self, *args):
     """A zero learning rate should exercise the learner's warning path."""
     frozen = CLiMFLearner(num_factors=2, num_iter=1, learning_rate=0.0)
     super().test_warnings(frozen, filename=__dataset__)
 # Example no. 10 (scraper artifact: stray "Exemplo n.º 10" / "0" lines)
 def test_CV(self, *args):
     """Cross-validation smoke test with a minimal model."""
     tiny = CLiMFLearner(num_factors=2, num_iter=1)
     super().test_CV(tiny, filename=__dataset__)
 # Example no. 11 (scraper artifact: stray "Exemplo n.º 11" / "0" lines)
 def test_input_data_discrete(self, *args):
     """Discrete-input check against the secondary dataset."""
     tiny = CLiMFLearner(num_factors=2, num_iter=1)
     super().test_input_data_discrete(tiny, filename=__dataset2__)