def test_recall_score_unsupported_average():
    """Test of `recall_score` with unsupported average parameter."""
    labels = np.array([0, 1, 2])
    predictions = np.array([0, 1, 2])
    # An unknown averaging strategy must be rejected up front.
    with pytest.raises(InvalidInput):
        recall_score(predictions, labels, average="foobar")
def test_recall_score_inconsistancy():
    """Test of `recall_score` with input inconsistency."""
    # Prediction and ground-truth arrays deliberately differ in length.
    predictions = np.array([0, 1])
    ground_truth = np.array([1])
    with pytest.raises(InvalidInput):
        recall_score(predictions, ground_truth)
def test_recall_score_multiclass_labels_macro():
    """Test of `recall_score` with multiclass labels and `macro` average."""
    ground_truth = np.array([0, 1, 2, 0, 1, 2])
    predictions = np.array([0, 2, 1, 0, 0, 1])
    # Per-class recalls are (1, 0, 0); the unweighted macro mean is ~0.33.
    result = recall_score(predictions, ground_truth, average="macro")
    assert result == pytest.approx(0.33, rel=3e-2)
def train_and_report(model, X_train, y_train, X_test, y_test):
    """Train the model and print a report.

    Fits ``model`` on the training split, then prints accuracy, macro
    precision/recall/F1 on the test split, and the wall-clock training time.

    Parameters
    ----------
    model : estimator exposing ``fit``, ``predict`` and ``score``
    X_train, y_train : training features and labels
    X_test, y_test : test features and labels
    """
    # Training
    start = timer()
    model.fit(X_train, y_train)
    end = timer()
    # Predict once and reuse: the original recomputed model.predict(X_test)
    # for each metric, tripling inference cost for identical output.
    y_pred = model.predict(X_test)
    # Report — metrics follow this project's (y_pred, y_true) argument order.
    print(f"Accuracy : {model.score(X_test, y_test):.3f}")
    print("---")
    precision = precision_score(y_pred, y_test, average="macro")
    print(f"Precision : {precision:.3f}")
    recall = recall_score(y_pred, y_test, average="macro")
    print(f"Recall : {recall:.3f}")
    print("---")
    f1 = f1_score(y_pred, y_test, average="macro")
    print(f"F1 score : {f1:.3f}")
    print("---")
    print(f"Training time : {end - start:.4f}s\n")
def test_recall_score_multiclass_labels():
    """Test of `recall_score` with multiclass labels and no average."""
    ground_truth = np.array([0, 1, 2, 0, 1, 2])
    predictions = np.array([0, 2, 1, 0, 0, 1])
    # Without averaging, one recall value per class is expected.
    expected = np.array([1, 0, 0])
    assert np.allclose(recall_score(predictions, ground_truth), expected)
def test_recall_score_binary_labels():
    """Test of `recall_score` with binary labels."""
    # 3 true positives out of 4 actual positives -> recall of 0.75.
    predictions = np.array([1, 1, 1, 0, 1])
    ground_truth = np.array([0, 1, 1, 1, 1])
    assert recall_score(predictions, ground_truth) == 0.75
def test_recall_score_no_prediction():
    """Test of `recall_score` with no prediction."""
    # The single positive sample is missed entirely, so recall is zero.
    predictions = np.array([0])
    ground_truth = np.array([1])
    assert recall_score(predictions, ground_truth) == 0