def test_segmentation_accuracy_raise_config_error_on_extra_args(self):
    metric_config = {
        'type': 'segmentation_accuracy',
        'something_extra': 'extra'
    }

    with pytest.raises(ConfigError):
        Metric.provide('segmentation_accuracy', metric_config, None)

def test_rmse_on_interval_raise_config_error_on_extra_args(self):
    metric_config = {
        'type': 'rmse_on_interval',
        'something_extra': 'extra'
    }

    with pytest.raises(ConfigError):
        Metric.provide('rmse_on_interval', metric_config, None)

def test_per_point_normed_error_raise_config_error_on_extra_args(self):
    metric_config = {
        'type': 'per_point_normed_error',
        'something_extra': 'extra'
    }

    with pytest.raises(ConfigError):
        Metric.provide('per_point_normed_error', metric_config, None)

def test_per_class_accuracy_raise_config_error_on_extra_args(self):
    metric_config = {
        'type': 'accuracy_per_class',
        'something_extra': 'extra'
    }

    with pytest.raises(ConfigError):
        Metric.provide('accuracy_per_class', metric_config, None)

def test_character_recognition_accuracy_raise_config_error_on_extra_args(self):
    metric_config = {
        'type': 'character_recognition_accuracy',
        'something_extra': 'extra'
    }

    with pytest.raises(ConfigError):
        Metric.provide('character_recognition_accuracy', metric_config, None)

def test_reid_map_raise_config_error_on_extra_args(self):
    metric_config = {'type': 'reid_map', 'something_extra': 'extra'}

    with pytest.raises(ConfigError):
        Metric.provide('reid_map', metric_config, None)

def test_reid_cmc_raise_config_error_on_extra_args(self):
    metric_config = {'type': 'cmc', 'something_extra': 'extra'}

    with pytest.raises(ConfigError):
        Metric.provide('cmc', metric_config, None)

def test_f1_score_raise_config_error_on_extra_args(self):
    metric_config = {'type': 'f1-score', 'something_extra': 'extra'}

    with pytest.raises(ConfigError):
        Metric.provide('f1-score', metric_config, None)

def test_detection_miss_rate_raise_config_error_on_extra_args(self):
    metric_config = {'type': 'miss_rate', 'something_extra': 'extra'}

    with pytest.raises(ConfigError):
        Metric.provide('miss_rate', metric_config, None)

def test_all_metrics_raise_config_error_on_extra_args(self):
    for provider in Metric.providers:
        metric_config = {'type': provider, 'something_extra': 'extra'}

        with pytest.raises(ConfigError):
            Metric.provide(provider, metric_config, None)

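# A hedged alternative sketch (an assumption, not part of the original suite):
# parametrizing over Metric.providers makes pytest report each failing provider
# separately, whereas the loop above stops at the first failure. It assumes only
# that Metric.providers is iterable over provider names, exactly as the loop in
# test_all_metrics_raise_config_error_on_extra_args already does.
@pytest.mark.parametrize('provider', Metric.providers)
def test_metric_raises_config_error_on_extra_args_parametrized(self, provider):
    metric_config = {'type': provider, 'something_extra': 'extra'}

    with pytest.raises(ConfigError):
        Metric.provide(provider, metric_config, None)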