Example #1
0
def create_iterator(opt, mode):
    """Build a batched data iterator for a CIFAR* or ImageNet dataset.

    Args:
        opt: options namespace; reads dataset, dataroot, batchSize,
            nthread, and (CIFAR only) randomcrop_pad.
        mode: truthy selects the training split (with augmentation and
            shuffling), falsy the test/val split.

    Returns:
        An iterable yielding (input, target) mini-batches.

    Raises:
        ValueError: if opt.dataset is neither CIFAR* nor 'ImageNet'.
    """
    if opt.dataset.startswith('CIFAR'):
        # Per-channel mean/std normalization, then HWC -> CHW, then tensor.
        to_tensor = tnt.transform.compose([
            lambda img: img.astype(np.float32),
            T.Normalize([125.3, 123.0, 113.9], [63.0, 62.1, 66.7]),
            lambda img: img.transpose(2, 0, 1),
            torch.from_numpy,
        ])

        # Training adds flip + reflect-pad + random 32x32 crop augmentation.
        augment = tnt.transform.compose([
            T.RandomHorizontalFlip(),
            T.Pad(opt.randomcrop_pad, cv2.BORDER_REFLECT),
            T.RandomCrop(32),
            to_tensor,
        ])

        split = 'train' if mode else 'test'
        raw = getattr(datasets, opt.dataset)(opt.dataroot, train=mode, download=True)
        dataset = tnt.dataset.TensorDataset([
            getattr(raw, split + '_data'),
            getattr(raw, split + '_labels'),
        ])
        dataset = dataset.transform({0: augment if mode else to_tensor})
        return dataset.parallel(batch_size=opt.batchSize, shuffle=mode,
                                num_workers=opt.nthread, pin_memory=True)

    if opt.dataset == 'ImageNet':

        def cvload(path):
            # OpenCV decodes BGR; convert to the RGB order the stats expect.
            img = cv2.imread(path, cv2.IMREAD_COLOR)
            return cv2.cvtColor(img, cv2.COLOR_BGR2RGB)

        # Scale to [0,1], normalize with ImageNet stats, HWC -> CHW, tensor.
        to_tensor = tnt.transform.compose([
            lambda img: img.astype(np.float32) / 255.0,
            T.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
            lambda img: img.transpose(2, 0, 1).astype(np.float32),
            torch.from_numpy,
        ])

        print("| setting up data loader...")
        if mode:
            root = os.path.join(opt.dataroot, 'train')
            split_transform = tnt.transform.compose([
                T.RandomSizedCrop(224),
                T.RandomHorizontalFlip(),
                to_tensor,
            ])
        else:
            root = os.path.join(opt.dataroot, 'val')
            split_transform = tnt.transform.compose([
                T.Scale(256),
                T.CenterCrop(224),
                to_tensor,
            ])
        ds = datasets.ImageFolder(root, split_transform, loader=cvload)

        return torch.utils.data.DataLoader(ds,
                                           batch_size=opt.batchSize, shuffle=mode,
                                           num_workers=opt.nthread, pin_memory=False)

    raise ValueError('dataset not understood')
Example #2
0
def create_dataset(opt, mode):
    """Return a tnt TensorDataset over a torchvision CIFAR-style dataset.

    Args:
        opt: options namespace; reads dataset, dataroot, randomcrop_pad.
        mode: truthy selects the training split (with flip/pad/crop
            augmentation), falsy the test split (normalization only).

    Returns:
        A tnt.dataset.TensorDataset with the image field transformed.
    """
    # Normalize per channel, reorder HWC -> CHW, convert to a torch tensor.
    pipeline = tnt.transform.compose([
        lambda img: img.astype(np.float32),
        T.Normalize(np.asarray([125.3, 123.0, 113.9], dtype=np.float32),
                    np.asarray([63.0, 62.1, 66.7], dtype=np.float32)),
        lambda img: img.transpose(2, 0, 1),
        torch.from_numpy,
    ])

    if mode:
        # Prepend training-time augmentation to the base pipeline.
        pipeline = tnt.transform.compose([
            T.RandomHorizontalFlip(),
            T.Pad(opt.randomcrop_pad, cv2.BORDER_REFLECT),
            T.RandomCrop(32),
            pipeline,
        ])

    raw = getattr(datasets, opt.dataset)(opt.dataroot,
                                         train=mode,
                                         download=True)
    split = 'train' if mode else 'test'
    tensors = [getattr(raw, split + '_data'),
               getattr(raw, split + '_labels')]
    return tnt.dataset.TensorDataset(tensors).transform({0: pipeline})
Example #3
0
def main():
    """Evaluate a hickle-stored model on the ImageNet validation split.

    Loads parameters from args.model, runs the functional model over the
    val set, and prints top-1/top-5 accuracy from a ClassErrorMeter.
    """
    # parse input arguments
    args = parser.parse_args()

    def cvload(path):
        # OpenCV decodes BGR; convert to RGB for the stats used below.
        img = cv2.imread(path, cv2.IMREAD_COLOR)
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        return img

    # set up data loader
    print("| setting up data loader...")
    valdir = os.path.join(args.imagenetpath, 'val')
    ds = datasets.ImageFolder(valdir, tnt.transform.compose([
            cvtransforms.Scale(256),
            cvtransforms.CenterCrop(224),
            lambda x: x.astype(np.float32) / 255.0,
            cvtransforms.Normalize(mean=[0.485, 0.456, 0.406],
                                   std=[0.229, 0.224, 0.225]),
            lambda x: x.transpose(2,0,1).astype(np.float32),
            torch.from_numpy,
            ]), loader=cvload)
    train_loader = torch.utils.data.DataLoader(ds,
        batch_size=256, shuffle=False,
        num_workers=args.numthreads, pin_memory=False)

    params = hkl.load(args.model)
    # FIX: dict.iteritems() is Python 2 only and raises AttributeError on
    # Python 3; items() is the portable spelling.
    params = {k: Variable(torch.from_numpy(v).cuda()) for k, v in params.items()}

    f = define_model(params)

    class_err = tnt.meter.ClassErrorMeter(topk=[1, 5], accuracy=True)

    for sample in tqdm(train_loader):
        inputs = Variable(sample[0].cuda(), volatile=True)  # inference only
        targets = sample[1]
        class_err.add(f(inputs, params).data, targets)

    # FIX: Python 2 print statements are syntax errors under Python 3;
    # use print() calls, consistent with the call earlier in this function.
    print('Validation top1/top5 accuracy:')
    print(class_err.value())
Example #4
0
def get_iterator(opt, mode):
    """Build an ImageNet DataLoader.

    Args:
        opt: options namespace; reads imagenetpath, batchSize, nthread.
        mode: truthy selects the train split (augmented, shuffled),
            falsy the val split (center-crop, unshuffled).

    Returns:
        A torch DataLoader over the chosen ImageFolder split.
    """

    def cvload(path):
        # OpenCV gives BGR; flip to RGB for the ImageNet statistics below.
        bgr = cv2.imread(path, cv2.IMREAD_COLOR)
        return cv2.cvtColor(bgr, cv2.COLOR_BGR2RGB)

    # Scale to [0,1], normalize with ImageNet stats, HWC -> CHW, to tensor.
    to_tensor = tnt.transform.compose([
        lambda img: img.astype(np.float32) / 255.0,
        cvtransforms.Normalize(mean=[0.485, 0.456, 0.406],
                               std=[0.229, 0.224, 0.225]),
        lambda img: img.transpose(2, 0, 1).astype(np.float32),
        torch.from_numpy,
    ])
    print("| setting up data loader...")
    if mode:
        root = os.path.join(opt.imagenetpath, 'train')
        transform = tnt.transform.compose([
            cvtransforms.RandomSizedCrop(224),
            cvtransforms.RandomHorizontalFlip(),
            to_tensor,
        ])
    else:
        root = os.path.join(opt.imagenetpath, 'val')
        transform = tnt.transform.compose([
            cvtransforms.Scale(256),
            cvtransforms.CenterCrop(224),
            to_tensor,
        ])
    ds = torchvision.datasets.ImageFolder(root, transform, loader=cvload)
    return DataLoader(ds,
                      batch_size=opt.batchSize,
                      shuffle=mode,
                      num_workers=opt.nthread,
                      pin_memory=False)
def create_dataset(opt, mode):
    """Return a tnt TensorDataset over a CIFAR-style dataset.

    Args:
        opt: options namespace; reads dataset (e.g. 'CIFAR10'), data_root
            (e.g. './'), and randomcrop_pad.
        mode: truthy selects the training split with flip/pad/crop
            augmentation; falsy the test split with normalization only.

    Returns:
        A tnt.dataset.TensorDataset with the image field transformed.
    """
    # Normalize per channel, reorder HWC -> CHW, convert to torch tensor.
    normalize = tnt.transform.compose([
        lambda img: img.astype(np.float32),
        cvtransforms.Normalize([125.3, 123.0, 113.9], [63.0, 62.1, 66.7]),
        lambda img: img.transpose(2, 0, 1).astype(np.float32),
        torch.from_numpy,
    ])

    # Training-time augmentation chained in front of the normalization.
    augment = tnt.transform.compose([
        cvtransforms.RandomHorizontalFlip(),
        cvtransforms.Pad(opt.randomcrop_pad, cv2.BORDER_REFLECT),
        cvtransforms.RandomCrop(32),
        normalize,
    ])

    raw = getattr(datasets, opt.dataset)(opt.data_root,
                                         train=mode,
                                         download=True)

    split = 'train' if mode else 'test'

    # The raw arrays are N x 3 x 32 x 32; move channels last (NHWC) so the
    # cv-based transforms above can operate on them. Labels are small ints
    # (0..9 for CIFAR10).
    tensors = [
        getattr(raw, split + '_data').transpose(0, 2, 3, 1),
        getattr(raw, split + '_labels'),
    ]
    # Augment during training, plain normalization otherwise.
    return tnt.dataset.TensorDataset(tensors).transform(
        {0: augment if mode else normalize})