Example No. 1
    def test_decision_PRF1_load_test_and_ground_truth(self):
        decision = PrecisionRecallF1()
        decision.load_test(self.TEST_DECISION)
        assert_equal(len(decision.get_test()), len(self.TEST_DECISION))
        decision.load_ground_truth(self.GT_DECISION)
        assert_equal(len(decision.get_ground_truth()), len(self.GT_DECISION))
        P, R, F1 = decision.compute()
        assert_equal(P, 0.75)
        assert_equal(R, 0.75)
        assert_equal(F1, 0.75)
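The same check can be reproduced outside the test class. A minimal sketch, assuming PrecisionRecallF1 scores the overlap between the two lists; the GT_DECISION and TEST_DECISION values below are hypothetical stand-ins for the fixtures used above:

from recsys.evaluation.decision import PrecisionRecallF1

GT_DECISION = ['item1', 'item2', 'item3', 'item4']    # hypothetical relevant items
TEST_DECISION = ['item1', 'item2', 'item3', 'item9']  # hypothetical recommended items

decision = PrecisionRecallF1()
decision.load_ground_truth(GT_DECISION)
decision.load_test(TEST_DECISION)
precision, recall, f1 = decision.compute()
# 3 of the 4 recommended items are relevant and 3 of the 4 relevant items
# are recommended, so precision = recall = f1 = 0.75 under that assumption
print(precision, recall, f1)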
Example No. 2
import pandas as pd
from recsys.evaluation.decision import PrecisionRecallF1


def pre_rec_graph(dataset, algo):
    print("-" * 15, "GRAPH GEN W/ PRECISION RECALL", "-" * 15)
    df1 = pd.read_csv("pred_matrix-full_ubcf.csv", sep=",")
    df2 = pd.read_csv("pred_matrix-full_ibcf.csv", sep=",")

    # predicted ratings from the user-based (UBCF) and item-based (IBCF)
    # matrices, plus the observed ratings from the dataset
    ub_pred = list(df1.loc[:, 'rat'])
    ib_pred = list(df2.loc[:, 'rat'])
    ds_prev = list(dataset.loc[:, 'Rating'])

    decision = PrecisionRecallF1()
    decision.load(ground_truth=ub_pred, test=ib_pred)
    result = decision.compute()  # returns (precision, recall, f1)
    print("PRF1 w.r.t. IBCF to UBCF Pred Matrix\nPRECISION:", result[0],
          "\nRECALL:", result[1], "\nF1:", result[2], "\n")

    decision.load(ground_truth=ds_prev, test=ib_pred)
    result = decision.compute()
    print("PRF1 w.r.t. IBCF Pred Matrix to U.DATA\nPRECISION:", result[0],
          "\nRECALL:", result[1], "\nF1:", result[2])
    print("\n")
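A decision metric such as PrecisionRecallF1 compares list membership rather than rating values, so raw rating columns are commonly thresholded into relevant/recommended item sets first. A hedged sketch under that overlap assumption, with hypothetical ratings and an arbitrary cutoff of 4.0 (neither appears in the example above):

from recsys.evaluation.decision import PrecisionRecallF1

ds_prev = [5.0, 3.0, 4.0, 2.0, 4.5]   # hypothetical observed ratings, one per item
ib_pred = [4.5, 3.5, 2.5, 1.5, 4.0]   # hypothetical IBCF predictions for the same items

RELEVANCE_CUTOFF = 4.0                # hypothetical cutoff, not in the original example

relevant_items = [i for i, r in enumerate(ds_prev) if r >= RELEVANCE_CUTOFF]     # [0, 2, 4]
recommended_items = [i for i, r in enumerate(ib_pred) if r >= RELEVANCE_CUTOFF]  # [0, 4]

decision = PrecisionRecallF1()
decision.load(ground_truth=relevant_items, test=recommended_items)
# under the overlap assumption: precision = 2/2 = 1.0, recall = 2/3
precision, recall, f1 = decision.compute()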
Example No. 3
    def compute(self):
        super(AveragePrecision, self).compute()
        from recsys.evaluation.decision import PrecisionRecallF1

        if not isinstance(self._test, list):
            self._test = [self._test]

        PRF1 = PrecisionRecallF1()
        p_at_k = []
        hits = 0
        # Average precision: precision@k is evaluated at every rank k whose
        # item is relevant (a "hit"); non-hit ranks contribute 0.
        for k in range(1, len(self._test) + 1):
            test = self._test[:k]
            PRF1.load(self._ground_truth, test)
            if test[k - 1] in self._ground_truth:
                p, r, f1 = PRF1.compute()
                hits += 1
            else:
                p = 0.0
            p_at_k.append(p)
        if not hits:
            return 0.0
        # Mean of precision@k over the hits only: the zeros appended for
        # non-hits add nothing to the sum, and the division is by hits.
        return sum(p_at_k) / hits
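A usage sketch for the compute() shown above, assuming AveragePrecision inherits load() from the same evaluation base class as PrecisionRecallF1 and that precision here is the fraction of recommended items found in the ground truth; the lists are hypothetical, and AveragePrecision's module is not shown in this listing:

ranked = ['a', 'b', 'c', 'd']   # hypothetical recommendations, best ranked first
relevant = ['a', 'c']           # hypothetical ground-truth (relevant) items

ap = AveragePrecision()
ap.load(ground_truth=relevant, test=ranked)
# hits at ranks 1 and 3 give precision@k of 1/1 and 2/3,
# so the average precision is (1/1 + 2/3) / 2, roughly 0.83
print(ap.compute())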
Example No. 4
    def test_decision_PRF1_load_ground_truth(self):
        decision = PrecisionRecallF1()
        decision.load_ground_truth(self.GT_DECISION)
        assert_equal(len(decision.get_ground_truth()), len(self.GT_DECISION))
Example No. 5
    def test_decision_PRF1_load_test(self):
        decision = PrecisionRecallF1()
        decision.load_test(self.TEST_DECISION)
        assert_equal(len(decision.get_test()), len(self.TEST_DECISION))
Example No. 6
    def test_decision_PRF1_empty(self):
        decision = PrecisionRecallF1()
        assert_raises(ValueError, decision.compute)
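Outside the test harness, the guard this test asserts can be observed directly:

from recsys.evaluation.decision import PrecisionRecallF1

decision = PrecisionRecallF1()
try:
    decision.compute()   # no test or ground-truth data loaded yet
except ValueError as err:
    print("compute() without loaded data raises ValueError:", err)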
Example No. 7
    def __init__(self):
        super(TestDecision, self).__init__()
        # Decision-based metrics: PrecisionRecallF1
        self.decision = PrecisionRecallF1()
        self.decision.load(self.GT_DECISION, self.TEST_DECISION)