Example 1
0
def predict_on_eval_using_fold(fold, train_eval_data):
    """Score a fold's held-out split with that fold's trained model.

    Loads ``model_{fold}.pth`` from the experiment directory, runs the
    evaluation split through it, fits a global softmax temperature on the
    collected logits, prints the resulting accuracy against the 'sirna'
    column, writes the class assignments to ``eval_{fold}.csv`` and
    returns the raw per-sample tensors.

    Returns:
        (labels, logits, exps, ids) accumulated over the whole split.
    """
    _, eval_indices = indices_for_fold(fold, train_eval_data)
    eval_rows = train_eval_data.iloc[eval_indices]
    loader = torch.utils.data.DataLoader(
        TrainEvalDataset(eval_rows, transform=eval_transform),
        batch_size=config.batch_size,
        num_workers=args.workers,
        worker_init_fn=worker_init_fn)

    model = Model(config.model, NUM_CLASSES).to(DEVICE)
    weights_path = os.path.join(args.experiment_path, f'model_{fold}.pth')
    model.load_state_dict(torch.load(weights_path))
    model.eval()

    with torch.no_grad():
        all_labels, all_logits = [], []
        all_exps, all_ids = [], []

        for images, feats, exps, labels, ids in tqdm(
                loader, desc=f'fold {fold} evaluation'):
            logits = model(images.to(DEVICE), feats.to(DEVICE))

            all_labels.append(labels.to(DEVICE))
            all_logits.append(logits)
            all_exps.extend(exps)
            all_ids.extend(ids)

        all_labels = torch.cat(all_labels, 0)
        all_logits = torch.cat(all_logits, 0)

        # Fit a single scaling temperature over the whole fold before
        # converting logits to class assignments.
        temperature, _, _ = find_temp_global(input=all_logits,
                                             target=all_labels,
                                             exps=all_exps)
        probs = (all_logits * temperature).softmax(1).data.cpu().numpy()
        classes = assign_classes(probs=probs, exps=all_exps)

        predictions = eval_rows.copy()
        print('{:.2f}'.format((predictions['sirna'] == classes).mean()))
        predictions['sirna'] = classes
        predictions.to_csv(
            os.path.join(args.experiment_path, f'eval_{fold}.csv'),
            index=False)

        return all_labels, all_logits, all_exps, all_ids
Example 2
0
def predict_on_eval_using_fold(fold, train_eval_data):
    """Evaluate a fold's model with test-time augmentation (TTA).

    Each loader sample carries a stack of ``2 * NUM_TTA`` augmented views
    per image; every view is scored in a single forward pass and the
    per-view softmax probabilities are kept separate in the output.

    Returns:
        (labels, probs, exps, plates, ids) where ``probs`` has shape
        (samples, views, NUM_CLASSES).
    """
    _, eval_indices = indices_for_fold(fold, train_eval_data)
    eval_rows = train_eval_data.iloc[eval_indices]
    loader = torch.utils.data.DataLoader(
        TrainEvalDataset(eval_rows, transform=eval_transform),
        # TTA multiplies per-sample memory, so shrink the batch.
        batch_size=config.batch_size // 4,
        num_workers=args.workers,
        worker_init_fn=worker_init_fn)

    model = Model(config.model, NUM_CLASSES).to(DEVICE)
    model.load_state_dict(torch.load(
        os.path.join(args.experiment_path, f'model_{fold}.pth')))
    model.eval()

    with torch.no_grad():
        all_labels, all_probs = [], []
        all_exps, all_ids = [], []

        for images, feats, exps, labels, ids in tqdm(
                loader, desc=f'fold {fold} evaluation'):
            images = images.to(DEVICE)
            feats = feats.to(DEVICE)
            labels = labels.to(DEVICE)

            b, n, c, h, w = images.size()
            assert n == 2 * NUM_TTA
            # Fold the view axis into the batch so all TTA crops share
            # one forward pass, then restore it on the logits.
            flat_images = images.view(b * n, c, h, w)
            flat_feats = feats.view(b, 1, 2).repeat(1, n, 1).view(b * n, 2)
            logits = model(flat_images, flat_feats).view(b, n, NUM_CLASSES)

            all_labels.append(labels)
            all_probs.append(softmax(logits))
            all_exps.extend(exps)
            all_ids.extend(ids)

        labels_cat = torch.cat(all_labels, 0)
        probs_cat = torch.cat(all_probs, 0)
        plates = eval_rows['plate'].values

        return labels_cat, probs_cat, all_exps, plates, all_ids
Example 3
0
def predict_on_eval_using_fold(fold, train_eval_data):
    """Evaluate a fold's model with TTA and refine the per-view logits.

    Runs every augmented view through the model, fits a global softmax
    temperature, assigns classes, then masks the logits via
    ``refine_scores`` (filling rejected entries with ``-inf``) using the
    per-sample experiment and plate metadata.

    Returns:
        (labels, refined_logits, exps, ids) for the whole eval split.
    """
    _, eval_indices = indices_for_fold(fold, train_eval_data)
    eval_rows = train_eval_data.iloc[eval_indices]
    loader = torch.utils.data.DataLoader(
        TrainEvalDataset(eval_rows, transform=eval_transform),
        batch_size=config.batch_size,
        num_workers=args.workers,
        worker_init_fn=worker_init_fn)

    model = Model(config.model, NUM_CLASSES).to(DEVICE)
    model.load_state_dict(torch.load(
        os.path.join(args.experiment_path, f'model_{fold}.pth')))
    model.eval()

    with torch.no_grad():
        all_labels, all_logits = [], []
        all_exps, all_ids = [], []

        for images, feats, exps, labels, ids in tqdm(
                loader, desc=f'fold {fold} evaluation'):
            images = images.to(DEVICE)
            feats = feats.to(DEVICE)
            labels = labels.to(DEVICE)

            # Collapse the TTA view axis into the batch for one forward
            # pass, then split it back out on the logits.
            b, n, c, h, w = images.size()
            flat_images = images.view(b * n, c, h, w)
            flat_feats = feats.view(b, 1, 2).repeat(1, n, 1).view(b * n, 2)
            logits = model(flat_images, flat_feats).view(b, n, NUM_CLASSES)

            all_labels.append(labels)
            all_logits.append(logits)
            all_exps.extend(exps)
            all_ids.extend(ids)

        labels_cat = torch.cat(all_labels, 0)
        logits_cat = torch.cat(all_logits, 0)

        plates = eval_rows['plate'].values
        temperature, _, _ = find_temp_global(input=logits_cat,
                                             target=labels_cat,
                                             exps=all_exps)
        classes = assign_classes(
            probs=to_prob(logits_cat, temperature).data.cpu().numpy(),
            exps=all_exps)
        # Suppress scores inconsistent with the assignment by flooring
        # them to -inf.
        refined = refine_scores(logits_cat, classes, exps=all_exps,
                                plates=plates, value=float('-inf'))

        return labels_cat, refined, all_exps, all_ids
Example 4
0
def predict_on_test_using_fold(fold, test_data):
    """Run a fold's trained model over the test set with TTA.

    Scores every augmented view per sample, caches the raw logits to
    ``./test_{fold}.pth`` and returns them with their metadata.

    NOTE(review): the checkpoint tuple omits ``fold_plates`` even though
    they are returned — confirm downstream loaders do not need plates.

    Returns:
        (logits, exps, plates, ids) where ``logits`` has shape
        (samples, views, NUM_CLASSES).
    """
    loader = torch.utils.data.DataLoader(
        TestDataset(test_data, transform=test_transform),
        batch_size=config.batch_size // 2,  # halve batch to fit TTA views
        num_workers=args.workers,
        worker_init_fn=worker_init_fn)

    model = Model(config.model, NUM_CLASSES).to(DEVICE)
    model.load_state_dict(torch.load(
        os.path.join(args.experiment_path, f'model_{fold}.pth')))
    model.eval()

    with torch.no_grad():
        all_logits, all_exps = [], []
        all_plates, all_ids = [], []

        for images, feats, exps, plates, ids in tqdm(
                loader, desc=f'fold {fold} inference'):
            images = images.to(DEVICE)
            feats = feats.to(DEVICE)

            # One forward pass over batch x views, then restore the view
            # axis on the logits.
            b, n, c, h, w = images.size()
            flat_images = images.view(b * n, c, h, w)
            flat_feats = feats.view(b, 1, 2).repeat(1, n, 1).view(b * n, 2)
            logits = model(flat_images, flat_feats).view(b, n, NUM_CLASSES)

            all_logits.append(logits)
            all_exps.extend(exps)
            all_plates.extend(plates)
            all_ids.extend(ids)

        logits_cat = torch.cat(all_logits, 0)

    torch.save((logits_cat, all_exps, all_ids), f'./test_{fold}.pth')

    return logits_cat, all_exps, all_plates, all_ids
Example 5
0
def compute_features_using_fold(fold, data):
    """Extract embedding features for ``data`` using a fold's model.

    Instantiates the model with ``return_features=True`` so the forward
    pass yields ``(logits, embeddings)``; only the embeddings are kept,
    one row per TTA view.

    Returns:
        (embeddings, exps, ids) where ``embeddings`` has shape
        (samples, views, feature_dim).
    """
    loader = torch.utils.data.DataLoader(
        TestDataset(data, transform=test_transform),
        batch_size=config.batch_size // 2,  # halve batch to fit TTA views
        num_workers=args.workers,
        worker_init_fn=worker_init_fn)

    model = Model(config.model, NUM_CLASSES, return_features=True).to(DEVICE)
    model.load_state_dict(torch.load(
        os.path.join(args.experiment_path, f'model_{fold}.pth')))
    model.eval()

    with torch.no_grad():
        all_embs, all_exps, all_ids = [], [], []

        for images, feats, exps, ids in tqdm(
                loader, desc=f'fold {fold} inference'):
            images = images.to(DEVICE)
            feats = feats.to(DEVICE)

            # Flatten the view axis into the batch, forward once, then
            # reshape the embeddings back to (batch, views, dim).
            b, n, c, h, w = images.size()
            flat_images = images.view(b * n, c, h, w)
            flat_feats = feats.view(b, 1, 2).repeat(1, n, 1).view(b * n, 2)
            _, embds = model(flat_images, flat_feats)
            embds = embds.view(b, n, embds.size(1))

            all_embs.append(embds)
            all_exps.extend(exps)
            all_ids.extend(ids)

        embs_cat = torch.cat(all_embs, 0)

    return embs_cat, all_exps, all_ids