@classmethod
def from_protobuf(cls, message):
    # Rebuild a ModelMetrics instance from its protobuf wire form,
    # delegating each nested field to the matching deserializer.
    return ModelMetrics(
        confusion_matrix=ConfusionMatrix.from_protobuf(message.scoreMatrix),
        regression_metrics=RegressionMetrics.from_protobuf(message.regressionMetrics),
        model_type=message.modelType,
    )
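# A minimal round-trip sketch for the deserializer above, assuming ModelMetrics
# also exposes a matching to_protobuf() serializer (an assumption; it is not
# shown in this snippet):
def check_model_metrics_round_trip(metrics):
    # to_protobuf() is the assumed counterpart of from_protobuf().
    restored = ModelMetrics.from_protobuf(metrics.to_protobuf())
    assert restored.model_type == metrics.model_type
    return restored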
# Imports assume whylogs' v0 package layout for these types (an assumption
# about where ConfusionMatrix and its protobuf messages live).
from whylogs.core.metrics.confusion_matrix import ConfusionMatrix
from whylogs.proto import ScoreMatrixMessage


def test_confusion_matrix_to_protobuf():
    targets_1 = ["cat", "dog", "pig"]
    predictions_1 = ["cat", "dog", "dog"]
    scores_1 = [0.1, 0.2, 0.4]
    labels_1 = ["cat", "dog", "pig"]

    conf_M_1 = ConfusionMatrix(labels_1)
    conf_M_1.add(predictions_1, targets_1, scores_1)

    message = conf_M_1.to_protobuf()
    new_conf = ConfusionMatrix.from_protobuf(message)

    # Labels must survive the round trip unchanged.
    for idx, value in enumerate(new_conf.labels):
        assert value == conf_M_1.labels[idx]

    # Each cell's tracked count must match the counts implied by the
    # (prediction, target) pairs fed to add() above.
    expected_1 = [[1, 0, 0], [0, 1, 1], [0, 0, 0]]
    for idx in range(len(new_conf.labels)):
        for jdx in range(len(new_conf.labels)):
            assert new_conf.confusion_matrix[idx, jdx].floats.count == expected_1[idx][jdx]
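# Where expected_1 comes from: each (prediction, target) pair recorded by
# add() increments one cell, with rows indexed by prediction and columns by
# target (an indexing convention inferred from the asserts above). A
# standalone sketch rebuilding the matrix from the same inputs:
predictions = ["cat", "dog", "dog"]
targets = ["cat", "dog", "pig"]
labels = ["cat", "dog", "pig"]
expected = [[0] * len(labels) for _ in labels]
for pred, tgt in zip(predictions, targets):
    expected[labels.index(pred)][labels.index(tgt)] += 1
assert expected == [[1, 0, 0], [0, 1, 1], [0, 0, 0]]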
def test_parse_empty_protobuf_should_return_none():
    empty_message = ScoreMatrixMessage()
    assert ConfusionMatrix.from_protobuf(empty_message) is None
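# A parallel guard for the regression side, sketched under the assumption that
# RegressionMetrics.from_protobuf treats an empty RegressionMetricsMessage the
# same way (both the behavior and the message name are assumptions here):
def test_parse_empty_regression_protobuf_should_return_none():
    empty_message = RegressionMetricsMessage()
    assert RegressionMetrics.from_protobuf(empty_message) is None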
@classmethod
def from_protobuf(cls, message):
    # Variant of the deserializer that restores only the confusion matrix,
    # relying on the empty-message guard to yield None for an unset field.
    return ModelMetrics(
        confusion_matrix=ConfusionMatrix.from_protobuf(message.scoreMatrix),
    )
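# Because ConfusionMatrix.from_protobuf returns None for an empty message, the
# deserializer above degrades gracefully when the scoreMatrix field was never
# set. A minimal sketch (ModelMetricsMessage as the containing wire type is an
# assumption):
restored = ModelMetrics.from_protobuf(ModelMetricsMessage())
assert restored.confusion_matrix is None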