# Assumed imports for these snippets; project-local helpers
# (KgForestDataset, randomShiftScaleRotate, randomFlip, randomTranspose,
# toTensor, mean, std, predict, pred_csv, BEST_THRESHOLD, densenet161)
# are expected to be in scope from the surrounding project.
import numpy as np
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torchvision.transforms import Compose, Lambda, Normalize


def get_dataloader(batch_size):
    train_data = KgForestDataset(
        split='train-40479',
        transform=Compose([
            # Random shift/scale/rotate jitter, applied with probability u=0.75
            Lambda(lambda x: randomShiftScaleRotate(
                x, u=0.75, shift_limit=6, scale_limit=6, rotate_limit=45)),
            Lambda(lambda x: randomFlip(x)),       # random flip
            Lambda(lambda x: randomTranspose(x)),  # random transpose
            Lambda(lambda x: toTensor(x)),         # ndarray -> torch tensor
            Normalize(mean=mean, std=std)          # per-channel normalization
        ]),
        height=256,
        width=256)
    train_data_loader = DataLoader(batch_size=batch_size,
                                   dataset=train_data,
                                   shuffle=True)

    validation = KgForestDataset(
        split='validation-3000',
        transform=Compose([
            # Lambda(lambda x: randomShiftScaleRotate(x, u=0.75, shift_limit=6, scale_limit=6, rotate_limit=45)),
            Lambda(lambda x: randomFlip(x)),
            Lambda(lambda x: randomTranspose(x)),
            Lambda(lambda x: toTensor(x)),
            Normalize(mean=mean, std=std)
        ]),
        height=256,
        width=256)

    valid_dataloader = DataLoader(dataset=validation,
                                  shuffle=False,
                                  batch_size=batch_size)
    return train_data_loader, valid_dataloader
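
# Hedged usage sketch (not from the original source): how the two loaders
# might be consumed in one training epoch. `net`, `criterion`, and `optimizer`
# are hypothetical placeholders; the train loader is assumed to yield
# (images, targets) pairs.
def train_one_epoch_example(net, criterion, optimizer, batch_size=64):
    train_loader, valid_loader = get_dataloader(batch_size)
    net.train()
    for images, targets in train_loader:
        images, targets = images.cuda(), targets.cuda()
        optimizer.zero_grad()
        loss = criterion(net(images), targets)
        loss.backward()
        optimizer.step()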
def test():
    net = nn.DataParallel(densenet161().cuda())
    net.load_state_dict(torch.load('models/densenet161.pth'))
    net.eval()

    dataset = KgForestDataset(split='test-61191',
                              transform=Compose([
                                  Lambda(lambda x: toTensor(x)),
                                  Normalize(mean=mean, std=std)
                              ]),
                              height=256,
                              width=256,
                              label_csv=None)

    test_loader = DataLoader(dataset,
                             batch_size=512,
                             shuffle=False,
                             pin_memory=True)
    probs = predict(net, test_loader)

    # Manual equivalent of predict(), kept for reference. Note that
    # Variable(..., volatile=True) and F.sigmoid are deprecated; modern code
    # would wrap the loop in `with torch.no_grad():` and call torch.sigmoid.
    # probs = np.empty((61191, 17))
    # current = 0
    # for batch_idx, (images, im_ids) in enumerate(test_loader):
    #     num = images.size(0)
    #     previous = current
    #     current = previous + num
    #     logits = net(Variable(images.cuda(), volatile=True))
    #     prob = F.sigmoid(logits)
    #     probs[previous:current, :] = prob.data.cpu().numpy()
    #     print('Batch Index ', batch_idx)

    pred_csv(probs, name='densenet161', threshold=BEST_THRESHOLD)
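
# `predict` is referenced above but not defined in this listing. A minimal
# sketch, assuming it mirrors the commented-out loop in test(): run the model
# over the loader and collect sigmoid probabilities into a single array.
def predict(net, loader):
    net.eval()
    chunks = []
    with torch.no_grad():
        for images, _ in loader:  # the test loader yields (images, im_ids)
            logits = net(images.cuda())
            chunks.append(torch.sigmoid(logits).cpu().numpy())
    return np.concatenate(chunks, axis=0)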
def get_test_dataloader():
    test_dataset = KgForestDataset(split='test-61191',
                                   transform=Compose([
                                       Lambda(lambda x: toTensor(x)),
                                       Normalize(mean=mean, std=std)
                                   ]),
                                   label_csv=None)

    test_dataloader = DataLoader(test_dataset, batch_size=16)
    return test_dataloader
def get_validation_loader():
    validation = KgForestDataset(split='train-40479',  # NOTE: loads the full train split despite the name
                                 transform=Compose([
                                     Lambda(lambda x: toTensor(x)),
                                     Normalize(mean=mean, std=std)
                                 ]),
                                 height=256,
                                 width=256)
    valid_dataloader = DataLoader(validation, batch_size=16, shuffle=False)
    return valid_dataloader
        # NOTE: the lines below are the tail of an optimize_threshold()
        # function whose head was lost in extraction; only this fragment survives.
        threshold[i] = best_thresh
        print(i, best_score, best_thresh)
    return threshold
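
# Since only the tail of optimize_threshold() survives above, here is a hedged
# reconstruction of the whole routine. The f2_score() helper and the
# `dataset.labels` ground-truth array are assumptions, not from the source:
# per class, scan candidate cut-offs and keep the one maximizing the F2 score.
def optimize_threshold_sketch(models, dataloaders):
    probs = np.mean([predict(m, d) for m, d in zip(models, dataloaders)], axis=0)
    labels = dataloaders[0].dataset.labels  # assumed (N, 17) ground-truth array
    threshold = np.full(17, 0.2)            # assumed starting cut-off
    for i in range(17):
        best_score, best_thresh = -1.0, 0.2
        for t in np.arange(0.05, 0.95, 0.01):
            candidate = threshold.copy()
            candidate[i] = t
            score = f2_score(labels, probs > candidate)  # hypothetical scorer
            if score > best_score:
                best_score, best_thresh = score, t
        threshold[i] = best_thresh
        print(i, best_score, best_thresh)
    return threshold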


if __name__ == '__main__':
    model1 = nn.DataParallel(densenet161(pretrained=True).cuda())
    model1.load_state_dict(torch.load('../models/densenet161.pth'))
    model1.eval()  # model is already on GPU; just switch to eval mode
    validation = KgForestDataset(
        split='validation-3000',
        transform=Compose(
            [
                Lambda(lambda x: randomFlip(x)),
                Lambda(lambda x: randomTranspose(x)),
                Lambda(lambda x: toTensor(x)),
                Normalize(mean=mean, std=std)
            ]
        ),
        height=256,
        width=256
    )

    valid_dataloader = DataLoader(dataset=validation, shuffle=False, batch_size=512)

    # The validation transform includes random flips/transposes, so each pass
    # over the loader sees different augmentations; averaging the optimized
    # thresholds over 10 passes stabilizes the per-class estimates.
    threshold = np.zeros(17)
    for i in range(10):
        threshold += optimize_threshold([model1], [valid_dataloader])
    print(threshold / 10)
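
    # Hedged follow-up (not in the source): the averaged per-class vector is
    # presumably what becomes BEST_THRESHOLD in test(); saving it to disk makes
    # that hand-off explicit. The filename is a placeholder.
    np.save('threshold.npy', threshold / 10)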