Example #1
def tests_no_metrics_to_protobuf_classification():
    mod_met = ModelMetrics(model_type=ModelType.CLASSIFICATION)

    assert mod_met.model_type == ModelType.CLASSIFICATION
    message = mod_met.to_protobuf()

    model_metrics = ModelMetrics.from_protobuf(message)
    assert model_metrics.model_type == ModelType.CLASSIFICATION
Example #2
def tests_no_metrics_to_protobuf_regression():

    mod_met = ModelMetrics(model_type=ModelType.REGRESSION)
    assert mod_met.model_type == ModelType.REGRESSION
    message = mod_met.to_protobuf()

    model_metrics = ModelMetrics.from_protobuf(message)
    assert model_metrics.model_type == ModelType.REGRESSION
Example #3
    @classmethod
    def from_protobuf(cls, message: ModelProfileMessage):
        # convert google.protobuf.pyext._message.RepeatedScalarContainer to a list
        output_fields = [f for f in message.output_fields]
        return ModelProfile(
            output_fields=output_fields,
            metrics=ModelMetrics.from_protobuf(message.metrics),
        )
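A minimal round-trip sketch built from the calls shown in these examples; the import paths and the ModelProfile.to_protobuf counterpart are assumptions about the surrounding whylogs v0 layout, not something these snippets confirm:

# Sketch only: import paths and to_protobuf() on ModelProfile are assumptions.
from whylogs.core.model_profile import ModelProfile
from whylogs.core.metrics.model_metrics import ModelMetrics
from whylogs.proto import ModelType

profile = ModelProfile(
    output_fields=["prediction"],
    metrics=ModelMetrics(model_type=ModelType.CLASSIFICATION),
)
message = profile.to_protobuf()                 # assumed serializer counterpart
restored = ModelProfile.from_protobuf(message)  # classmethod shown above
assert restored.output_fields == ["prediction"]
assert restored.metrics.model_type == ModelType.CLASSIFICATION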
Example #4
def tests_model_metrics():
    mod_met = ModelMetrics(model_type=ModelType.CLASSIFICATION)

    targets_1 = ["cat", "dog", "pig"]
    predictions_1 = ["cat", "dog", "dog"]
    scores_1 = [0.1, 0.2, 0.4]

    expected_1 = [[1, 0, 0], [0, 1, 1], [0, 0, 0]]

    mod_met.compute_confusion_matrix(predictions_1, targets_1, scores_1)

    assert mod_met.model_type == ModelType.CLASSIFICATION

    for idx, value in enumerate(mod_met.confusion_matrix.labels):
        for jdx, value_2 in enumerate(mod_met.confusion_matrix.labels):
            print(idx, jdx)
            assert mod_met.confusion_matrix.confusion_matrix[idx, jdx].floats.count == expected_1[idx][jdx]
Example #5
    def __init__(self, output_fields=None, metrics: ModelMetrics = None):
        super().__init__()

        if output_fields is None:
            output_fields = []
        self.output_fields = output_fields
        if metrics is None:
            metrics = ModelMetrics()
        self.metrics = metrics
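A short construction sketch based on the constructor above: with no arguments, output_fields defaults to an empty list and metrics to a fresh ModelMetrics; the import paths are assumptions.

# Sketch only: import paths are assumptions about the package layout.
from whylogs.core.model_profile import ModelProfile
from whylogs.core.metrics.model_metrics import ModelMetrics
from whylogs.proto import ModelType

default_profile = ModelProfile()
assert default_profile.output_fields == []                # defaulted to []
assert isinstance(default_profile.metrics, ModelMetrics)  # defaulted to ModelMetrics()

custom_profile = ModelProfile(
    output_fields=["prediction", "score"],
    metrics=ModelMetrics(model_type=ModelType.REGRESSION),
)
assert custom_profile.metrics.model_type == ModelType.REGRESSION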
Example #6
def test_merge_metrics_with_none_confusion_matrix():
    metrics = ModelMetrics()
    other = ModelMetrics()
    other.confusion_matrix = None
    other.regression_metrics = None

    new_metrics = metrics.merge(other)
    assert new_metrics.model_type == ModelType.UNKNOWN
Example #7
def tests_model_metrics_to_protobuf_regression():
    regression_model = ModelMetrics(model_type=ModelType.REGRESSION)

    targets_1 = [0.1, 0.3, 0.4]
    predictions_1 = [0.5, 0.5, 0.5]
    regression_model.compute_regression_metrics(predictions_1, targets_1)
    regression_message = regression_model.to_protobuf()
    model_metrics_from_message = ModelMetrics.from_protobuf(regression_message)
    assert model_metrics_from_message.model_type == ModelType.REGRESSION
Example #8
def tests_model_metrics_to_protobuf():
    mod_met = ModelMetrics()

    targets_1 = ["cat", "dog", "pig"]
    predictions_1 = ["cat", "dog", "dog"]
    scores_1 = [0.1, 0.2, 0.4]

    expected_1 = [[1, 0, 0], [0, 1, 1], [0, 0, 0]]

    mod_met.compute_confusion_matrix(predictions_1, targets_1, scores_1)

    message = mod_met.to_protobuf()

    ModelMetrics.from_protobuf(message)
Example #9
def tests_model_metrics_to_protobuf_classification():
    mod_met = ModelMetrics(model_type=ModelType.CLASSIFICATION)

    targets_1 = ["cat", "dog", "pig"]
    predictions_1 = ["cat", "dog", "dog"]
    scores_1 = [0.1, 0.2, 0.4]

    mod_met.compute_confusion_matrix(predictions_1, targets_1, scores_1)

    message = mod_met.to_protobuf()

    model_metrics = ModelMetrics.from_protobuf(message)
    assert model_metrics.model_type == ModelType.CLASSIFICATION
    assert model_metrics.confusion_matrix.labels == ["cat", "dog", "pig"]
Example #10
def test_merge_metrics_model():
    metrics = ModelMetrics()
    other = ModelMetrics(model_type=ModelType.REGRESSION)
    other.regression_metrics = None
    new_metrics = metrics.merge(other)
    assert new_metrics.model_type == ModelType.REGRESSION
    assert new_metrics.confusion_matrix is None

    # keep initial model type during merge
    metrics = ModelMetrics(model_type=ModelType.REGRESSION)
    other = ModelMetrics(model_type=ModelType.CLASSIFICATION)
    other.regression_metrics = None
    new_metrics = metrics.merge(other)
    assert new_metrics.model_type == ModelType.REGRESSION
    assert new_metrics.confusion_matrix is None
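The two merge tests above read as: an UNKNOWN model type is overwritten by the other side's type, while an already-set type is kept. A hedged sketch of merging metrics gathered on separate batches, reusing only the calls shown in these examples (import paths assumed):

# Sketch only: assumes ConfusionMatrix.merge handles differing label sets.
from whylogs.core.metrics.model_metrics import ModelMetrics
from whylogs.proto import ModelType

batch_1 = ModelMetrics(model_type=ModelType.CLASSIFICATION)
batch_1.compute_confusion_matrix(["cat", "dog"], ["cat", "cat"], [0.9, 0.4])

batch_2 = ModelMetrics(model_type=ModelType.CLASSIFICATION)
batch_2.compute_confusion_matrix(["dog", "pig"], ["dog", "pig"], [0.7, 0.8])

combined = batch_1.merge(batch_2)
assert combined.model_type == ModelType.CLASSIFICATION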
Example #11
def test_merge_metrics_with_none_confusion_matrix():
    metrics = ModelMetrics()
    other = ModelMetrics()
    other.confusion_matrix = None
    metrics.merge(other)
Example #12
def test_merge_none():
    metrics = ModelMetrics()
    assert metrics.merge(None) == metrics
Example #13
def test_model_metrics_init():
    reg_met = RegressionMetrics()
    conf_ma = ConfusionMatrix()
    with pytest.raises(NotImplementedError):
        metrics = ModelMetrics(confusion_matrix=conf_ma, regression_metrics=reg_met)
Example #14
def test_merge_metrics_with_none_regression_matrix():
    metrics = ModelMetrics()
    other = ModelMetrics(model_type=ModelType.REGRESSION)
    other.regression_metrics = None
    new_metrics = metrics.merge(other)
    assert new_metrics.model_type == ModelType.REGRESSION
Example #15
def test_merge_none():
    metrics = ModelMetrics()
    metrics.merge(None)