import pytest
import torch

import ignite.distributed as idist
from ignite.engine import Engine
from ignite.metrics.regression import MeanNormalizedBias  # ignite.contrib.metrics.regression in older releases


def _test(n_epochs, metric_device):
    # NOTE: `device` and `rank` are free variables supplied by the caller's
    # scope (see the driver sketch after this helper).
    metric_device = torch.device(metric_device)
    n_iters = 80
    s = 16
    n_classes = 2  # unused in this regression test

    offset = n_iters * s
    # one global pair of tensors; each rank reads its own slice in update()
    y_true = torch.rand(size=(offset * idist.get_world_size(),)).to(device)
    y_preds = torch.rand(size=(offset * idist.get_world_size(),)).to(device)

    def update(engine, i):
        return (
            y_preds[i * s + rank * offset : (i + 1) * s + rank * offset],
            y_true[i * s + rank * offset : (i + 1) * s + rank * offset],
        )

    engine = Engine(update)

    m = MeanNormalizedBias(device=metric_device)
    m.attach(engine, "mnb")

    data = list(range(n_iters))
    engine.run(data=data, max_epochs=n_epochs)

    assert "mnb" in engine.state.metrics
    res = engine.state.metrics["mnb"]

    # reference value: mean of (y - y_pred) / y over the full (all-ranks) data
    np_y_true = y_true.cpu().numpy()
    np_y_preds = y_preds.cpu().numpy()

    np_sum = ((np_y_true - np_y_preds) / np_y_true).sum()
    np_len = len(np_y_preds)
    np_ans = np_sum / np_len

    assert pytest.approx(res) == np_ans
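
# A minimal, hypothetical driver (not part of the original tests) showing one
# way to exercise the distributed helper above. It assumes a plain
# single-process run, in which ignite.distributed falls back to its serial
# backend (world_size == 1, rank == 0), and it supplies the `device` and
# `rank` globals that `_test` closes over. The seed and parameter values are
# arbitrary.
if __name__ == "__main__":
    device = idist.device()
    rank = idist.get_rank()
    torch.manual_seed(12 + rank)
    for _metric_device in ("cpu", idist.device()):
        for _n_epochs in (1, 2):
            _test(_n_epochs, _metric_device)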


# Batched, single-process variant of the test helper (note that at module
# scope this redefines the `_test` above).
def _test(y_pred, y, batch_size):
    def update_fn(engine, batch):
        # feed fixed-size batches taken from the full numpy arrays
        idx = (engine.state.iteration - 1) * batch_size
        y_true_batch = np_y[idx : idx + batch_size]
        y_pred_batch = np_y_pred[idx : idx + batch_size]
        return torch.from_numpy(y_pred_batch), torch.from_numpy(y_true_batch)

    engine = Engine(update_fn)

    m = MeanNormalizedBias()
    m.attach(engine, "mnb")

    np_y = y.numpy()
    np_y_pred = y_pred.numpy()

    data = list(range(y_pred.shape[0] // batch_size))
    mnb = engine.run(data, max_epochs=1).metrics["mnb"]

    # reference value: mean of (y - y_pred) / y computed directly in numpy
    np_sum = ((np_y - np_y_pred) / np_y).sum()
    np_len = len(np_y_pred)
    np_ans = np_sum / np_len

    assert np_ans == pytest.approx(mnb)
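

# A minimal, hypothetical invocation (not part of the original tests) of the
# batched helper above. The test name and seed are arbitrary; the targets are
# shifted away from zero so the (y - y_pred) / y normalization never divides
# by zero, and the batch sizes divide the sample count so every sample is
# consumed by the engine.
def test_mean_normalized_bias_batched():
    torch.manual_seed(8)
    y_pred = torch.rand(size=(100,))
    y = torch.rand(size=(100,)) + 0.5
    for batch_size in (1, 10, 100):
        _test(y_pred, y, batch_size)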