def test_accuracy_meter_registry(self):
    """Verify build_meter constructs a VideoAccuracyMeter from a config dict."""
    accuracy_meter = meters.build_meter(
        {
            "name": "video_accuracy",
            "topk": [1, 2],
            "clips_per_video_train": 1,
            "clips_per_video_test": 2,
        }
    )
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only says "False is not true".
    self.assertIsInstance(accuracy_meter, VideoAccuracyMeter)
def test_accuracy_meter_registry(self):
    """Verify build_meter constructs an AccuracyListMeter from a config dict."""
    meter = build_meter(
        {
            "name": "accuracy_list_meter",
            "num_meters": 1,
            "topk_values": [1, 3],
            "meter_names": [],
        }
    )
    # assertIsInstance gives a diagnostic failure message, unlike
    # assertTrue(isinstance(...)).
    self.assertIsInstance(meter, AccuracyListMeter)
def test_precision_meter_registry(self):
    """Verify build_meter constructs a PrecisionAtKListMeter from a config dict."""
    meter = build_meter(
        {
            "name": "precision_at_k_list_meter",
            "num_meters": 1,
            "topk_values": [1, 3],
            "meter_names": [],
        }
    )
    # assertIsInstance gives a diagnostic failure message, unlike
    # assertTrue(isinstance(...)).
    self.assertIsInstance(meter, PrecisionAtKListMeter)
def _build_meters(self):
    """Build the single meter configured for this task.

    Reads ``self.config["METERS"]``; when no meter ``name`` is set there,
    no meters are built and an empty list is returned.
    """
    meters_section = self.config["METERS"]
    meter_name = meters_section.get("name", "")
    if not meter_name:
        # No meter configured for this task.
        return []
    # The meter's own parameters live under a key matching its name.
    meter_config = {"name": meter_name, **meters_section[meter_name]}
    return [build_meter(meter_config)]
def test_build_meters(self):
    """Smoke-test that each list-meter config is accepted by build_meter."""
    meter_names = (
        "accuracy_list_meter",
        "precision_at_k_list_meter",
        "recall_at_k_list_meter",
    )
    for meter_name in meter_names:
        # Fresh config per meter; all three share the same parameter shape.
        config = {
            "name": meter_name,
            "num_meters": 1,
            "topk_values": [1, 3],
            "meter_names": [],
        }
        build_meter(config)
def _build_meters(self):
    """Build every meter listed in the task's METERS config.

    Reads ``self.config["METERS"]["names"]``; returns an empty list when
    no names are configured.
    """
    meters_section = self.config["METERS"]
    # Each meter's parameters live under a key matching its name; an empty
    # "names" list naturally yields an empty result.
    return [
        build_meter({"name": name, **meters_section[name]})
        for name in meters_section.get("names", [])
    ]
def test_multi_update(self):
    """Check that reading a meter's value does not perturb its state.

    Builds one of each list meter, then repeatedly feeds random softmax
    scores against a fixed multi-label target. Before each update it
    asserts the meter's value string is unchanged since the end of the
    previous iteration, i.e. ``.value`` is stable between updates.
    """
    configs = [
        {
            "name": "accuracy_list_meter",
            "num_meters": 1,
            "topk_values": [1, 3],
            "meter_names": [],
        },
        {
            "name": "precision_at_k_list_meter",
            "num_meters": 1,
            "topk_values": [1, 3],
            "meter_names": [],
        },
        {
            "name": "recall_at_k_list_meter",
            "num_meters": 1,
            "topk_values": [1, 3],
            "meter_names": [],
        },
    ]
    meters = [build_meter(config) for config in configs]

    # One-hot encoding, 1 = positive for class
    # sample-1: 1, sample-2: 0, sample-3: 0,1,2
    target = torch.tensor([[0, 1, 0], [1, 0, 0], [1, 1, 1]])

    prev_values = {}
    for _ in range(50):
        # Batchsize = 3, num classes = 3, score is probability of class
        model_output = torch.rand((3, 3)).softmax(dim=1).cpu()
        for i, meter in enumerate(meters):
            # Membership test goes directly on the dict (no .keys()).
            if i in prev_values:
                # unittest assertion instead of a bare `assert`: it is not
                # stripped under `python -O` and reports both values on failure.
                self.assertEqual(str(meter.value), prev_values[i])
            meter.update(model_output, target.cpu())
            prev_values[i] = str(meter.value)
def test_precision_meter_registry(self):
    """Verify build_meter constructs a PrecisionAtKMeter from a config dict."""
    meter = meters.build_meter({"name": "precision_at_k", "topk": [1, 3]})
    # assertIsInstance gives a diagnostic failure message, unlike
    # assertTrue(isinstance(...)).
    self.assertIsInstance(meter, PrecisionAtKMeter)
def test_accuracy_meter_registry(self):
    """Verify build_meter constructs an AccuracyMeter from a config dict."""
    accuracy_meter = meters.build_meter({"name": "accuracy", "topk": [1, 2]})
    # assertIsInstance gives a diagnostic failure message, unlike
    # assertTrue(isinstance(...)).
    self.assertIsInstance(accuracy_meter, AccuracyMeter)
def test_recall_meter_registry(self):
    """Verify build_meter constructs a RecallAtKMeter from a config dict."""
    meter = meters.build_meter({"name": "recall_at_k", "topk": [1, 3]})
    # assertIsInstance gives a diagnostic failure message, unlike
    # assertTrue(isinstance(...)).
    self.assertIsInstance(meter, RecallAtKMeter)