Example #1
from typing import Iterable, Union

import numpy as np
import torch

# Import path assumed for the Catalyst metric under test.
from catalyst.metrics import MultilabelAccuracyMetric


def test_multilabel_accuracy_mean(
    outputs_list: Iterable[torch.Tensor],
    targets_list: Iterable[torch.Tensor],
    thresholds: Union[float, torch.Tensor],
    true_values_list: Iterable[float],
) -> None:
    """
    This test checks that all the intermediate metric values are correct during accumulation.

    Args:
        outputs_list: list of model output tensors
        targets_list: list of ground-truth target tensors
        thresholds: threshold(s) for binarizing the outputs
        true_values_list: expected intermediate metric values
    """
    metric = MultilabelAccuracyMetric(threshold=thresholds)
    for outputs, targets, true_value in zip(outputs_list, targets_list,
                                            true_values_list):
        metric.update(outputs=outputs, targets=targets)
        mean, _ = metric.compute()
        assert np.isclose(mean, true_value)
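
The accumulation pattern above can be exercised directly with concrete tensors. A minimal sketch, assuming the metric is importable as catalyst.metrics.MultilabelAccuracyMetric (the import path and the example tensors are assumptions of this sketch):

import torch

from catalyst.metrics import MultilabelAccuracyMetric  # assumed import path

metric = MultilabelAccuracyMetric(threshold=0.5)
batches = [
    (torch.tensor([[0.9, 0.1], [0.8, 0.7]]), torch.tensor([[1.0, 0.0], [1.0, 1.0]])),
    (torch.tensor([[0.2, 0.6], [0.4, 0.3]]), torch.tensor([[0.0, 1.0], [1.0, 0.0]])),
]
for outputs, targets in batches:
    metric.update(outputs=outputs, targets=targets)
    # compute() returns the running (mean, std) over every sample seen so far.
    mean, std = metric.compute()
    print(mean, std)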
Example #2
def test_multilabel_accuracy_std(
    outputs_list: Iterable[torch.Tensor],
    targets_list: Iterable[torch.Tensor],
    thresholds: Union[float, torch.Tensor],
    true_values_list: Iterable[float],
) -> None:
    """
    This test checks that all the intermediate metric values are correct during accumulation.
    Note that `accuracy/std` is currently not an exact standard deviation, so this test may
    fail if that implementation is fixed.

    Args:
        outputs_list: list of model output tensors
        targets_list: list of ground-truth target tensors
        thresholds: threshold(s) for binarizing the outputs
        true_values_list: expected intermediate metric values
    """
    metric = MultilabelAccuracyMetric(threshold=thresholds)
    for outputs, targets, true_value in zip(outputs_list, targets_list,
                                            true_values_list):
        metric.update(outputs=outputs, targets=targets)
        _, std = metric.compute()
        assert np.isclose(std, true_value)
Example #3
def test_multilabel_accuracy(
    outputs: torch.Tensor,
    targets: torch.Tensor,
    thresholds: Union[float, torch.Tensor],
    true_values: Dict[str, float],
) -> None:
    """
    Test the multilabel accuracy metric with single and multiple thresholds.
    Note that `accuracy/std` is currently not an exact standard deviation, so this test may
    fail if that implementation is fixed.

    Args:
        outputs: tensor of model outputs
        targets: tensor of ground-truth targets
        thresholds: threshold(s) for multilabel classification
        true_values: expected metric values keyed by metric name
    """
    metric = MultilabelAccuracyMetric(threshold=thresholds)
    metric.update(outputs=outputs, targets=targets)
    values = metric.compute_key_value()
    for key in true_values.keys():
        assert key in values
        assert np.isclose(true_values[key], values[key])
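
The key-value form of the metric can be inspected directly as well. A minimal sketch, assuming the same import path and that the returned dictionary uses keys such as "accuracy" and "accuracy/std" (the key names follow the docstring above and are assumptions about this Catalyst version):

import torch

from catalyst.metrics import MultilabelAccuracyMetric  # assumed import path

# Per-label thresholds: the second label uses a stricter cut-off than the others.
metric = MultilabelAccuracyMetric(threshold=torch.tensor([0.5, 0.7, 0.5]))
metric.update(
    outputs=torch.tensor([[0.9, 0.8, 0.1], [0.3, 0.9, 0.6]]),
    targets=torch.tensor([[1.0, 1.0, 0.0], [0.0, 1.0, 1.0]]),
)
values = metric.compute_key_value()
print(values)  # expected to contain the mean entry and an "accuracy/std"-style entry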
Example #4
def __init__(
    self,
    input_key: str,
    target_key: str,
    threshold: Union[float, torch.Tensor] = 0.5,
    log_on_batch: bool = True,
    prefix: str = None,
    suffix: str = None,
):
    """Init."""
    # Wrap MultilabelAccuracyMetric and delegate the rest to the parent callback.
    super().__init__(
        metric=MultilabelAccuracyMetric(threshold=threshold, prefix=prefix, suffix=suffix),
        input_key=input_key,
        target_key=target_key,
        log_on_batch=log_on_batch,
    )
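
A minimal usage sketch for the constructor above, assuming it belongs to Catalyst's MultilabelAccuracyCallback (the class name and import path are assumptions; only the keyword arguments come from the signature shown):

from catalyst.callbacks import MultilabelAccuracyCallback  # assumed class name and path

callback = MultilabelAccuracyCallback(
    input_key="logits",    # batch key holding the model outputs
    target_key="targets",  # batch key holding the ground-truth labels
    threshold=0.5,         # single threshold shared by all labels
    log_on_batch=True,     # also log the metric per batch
)

The callback only wraps MultilabelAccuracyMetric and forwards the batch keys, so the threshold semantics are the same as in the tests above.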