Example no. 1
0
def test_integration_ap_score_with_activated_output_transform():
    """Integration test: AveragePrecision attached to an Engine, with an
    output_transform that applies softmax to the predictions, must match
    sklearn's average_precision_score computed on the same softmaxed data.
    """
    np.random.seed(1)
    size = 100
    np_y_pred = np.random.rand(size, 1)
    # NOTE(review): softmax over dim=1 of a (size, 1) tensor yields all-ones
    # scores; presumably intentional for this smoke test — verify upstream.
    np_y_pred_softmax = torch.softmax(torch.from_numpy(np_y_pred),
                                      dim=1).numpy()
    # np.long was deprecated in NumPy 1.20 and removed in 1.24; np.int64 is
    # the exact equivalent dtype.
    np_y = np.zeros((size, ), dtype=np.int64)
    np_y[size // 2:] = 1
    np.random.shuffle(np_y)

    np_ap = average_precision_score(np_y, np_y_pred_softmax)

    batch_size = 10

    def update_fn(engine, batch):
        # Slice the next batch using the engine's 1-based iteration counter.
        idx = (engine.state.iteration - 1) * batch_size
        y_true_batch = np_y[idx:idx + batch_size]
        y_pred_batch = np_y_pred[idx:idx + batch_size]
        return idx, torch.from_numpy(y_pred_batch), torch.from_numpy(
            y_true_batch)

    engine = Engine(update_fn)

    # The transform extracts (y_pred, y) from the 3-tuple returned by
    # update_fn and applies softmax, mirroring np_y_pred_softmax above.
    ap_metric = AveragePrecision(
        output_transform=lambda x: (torch.softmax(x[1], dim=1), x[2]))
    ap_metric.attach(engine, 'ap')

    data = list(range(size // batch_size))
    ap = engine.run(data, max_epochs=1).metrics['ap']

    assert ap == np_ap
Example no. 2
0
    def _test(y_preds, y_true, n_epochs, metric_device, update_fn):
        """Run an Engine with AveragePrecision on the given device and
        compare the computed metric against sklearn's reference value.

        Relies on ``n_iters`` from the enclosing scope for the data length.
        """
        metric_device = torch.device(metric_device)

        trainer = Engine(update_fn)

        ap_metric = AveragePrecision(device=metric_device)
        ap_metric.attach(trainer, "ap")

        # One integer per iteration; update_fn selects the actual batch.
        batch_indices = list(range(n_iters))
        trainer.run(data=batch_indices, max_epochs=n_epochs)

        assert "ap" in trainer.state.metrics
        computed = trainer.state.metrics["ap"]

        reference = average_precision_score(y_true.cpu().numpy(),
                                            y_preds.cpu().numpy())
        assert pytest.approx(computed) == reference
Example no. 3
0
    def _test(n_epochs, metric_device):
        """Distributed smoke test: each rank feeds its own shard of random
        multi-label data into AveragePrecision; the aggregated result must
        match sklearn's value over the full (all-rank) dataset.

        Relies on ``idist``, ``device`` and ``rank`` from the enclosing scope.
        """
        metric_device = torch.device(metric_device)
        n_iters = 80
        s = 16
        n_classes = 2

        # Total number of samples contributed by one rank.
        offset = n_iters * s
        world = idist.get_world_size()
        # NOTE: generation order (targets first, then predictions) is kept so
        # the torch RNG stream matches the original test.
        targets = torch.randint(0, n_classes, size=(offset * world, 10)).to(device)
        predictions = torch.randint(0, n_classes, size=(offset * world, 10)).to(device)

        def update(engine, i):
            # Each rank reads its own contiguous slice of the shared tensors.
            lo = i * s + rank * offset
            hi = (i + 1) * s + rank * offset
            return predictions[lo:hi, :], targets[lo:hi, :]

        engine = Engine(update)

        ap_metric = AveragePrecision(device=metric_device)
        ap_metric.attach(engine, "ap")

        engine.run(data=list(range(n_iters)), max_epochs=n_epochs)

        assert "ap" in engine.state.metrics
        computed = engine.state.metrics["ap"]
        if isinstance(computed, torch.Tensor):
            computed = computed.cpu().numpy()

        reference = average_precision_score(targets.cpu().numpy(),
                                            predictions.cpu().numpy())
        assert pytest.approx(computed) == reference
Example no. 4
0
    def _test(y_pred, y, batch_size):
        """Feed (y_pred, y) to AveragePrecision in batches via an Engine and
        check the result equals sklearn's average_precision_score.
        """
        # Materialize numpy views up front; update_fn closes over them.
        np_y = y.numpy()
        np_y_pred = y_pred.numpy()
        np_ap = average_precision_score(np_y, np_y_pred)

        def update_fn(engine, batch):
            # 1-based iteration counter -> start index of this batch.
            start = (engine.state.iteration - 1) * batch_size
            stop = start + batch_size
            pred_batch = torch.from_numpy(np_y_pred[start:stop])
            true_batch = torch.from_numpy(np_y[start:stop])
            return start, pred_batch, true_batch

        engine = Engine(update_fn)

        # Drop the leading index from update_fn's 3-tuple output.
        ap_metric = AveragePrecision(output_transform=lambda x: (x[1], x[2]))
        ap_metric.attach(engine, "ap")

        n_batches = y_pred.shape[0] // batch_size
        ap = engine.run(list(range(n_batches)), max_epochs=1).metrics["ap"]

        assert isinstance(ap, float)
        assert np_ap == pytest.approx(ap)