コード例 #1
0
def get_imagenet_iterator(root,
                          batch_size,
                          num_workers,
                          data_shape=224,
                          dtype='float32'):
    """Build train/val ``DataLoaderIter`` pairs from an ImageNet folder tree.

    Parameters
    ----------
    root : str
        Dataset root containing ``train/`` and ``val/`` subdirectories.
    batch_size : int
        Number of images per batch.
    num_workers : int
        Worker processes used by each ``DataLoader``.
    data_shape : int, default 224
        Side length of the square crop produced by the transforms.
    dtype : str, default 'float32'
        Data type the iterators cast batches to.

    Returns
    -------
    tuple
        ``(train_iter, val_iter)`` as ``DataLoaderIter`` objects.

    Raises
    ------
    ValueError
        If validation images are not stored one subdirectory per category.
    """
    train_dir = os.path.join(root, 'train')
    val_dir = os.path.join(root, 'val')
    # Fail fast: validate the val/ layout BEFORE the slow train-folder scan,
    # so a mis-prepared validation set is reported immediately.  (The original
    # only checked after fully loading the training dataset, and rebuilt the
    # 'val' path instead of reusing val_dir.)
    if not os.path.isdir(
            os.path.expanduser(os.path.join(val_dir, 'n01440764'))):
        user_warning = 'Make sure validation images are stored in one subdir per category, a helper script is available at https://git.io/vNQv1'
        raise ValueError(user_warning)
    train_transform, val_transform = get_imagenet_transforms(data_shape, dtype)
    logging.info("Loading image folder %s, this may take a bit long...",
                 train_dir)
    train_dataset = ImageFolderDataset(train_dir, transform=train_transform)
    train_data = DataLoader(train_dataset,
                            batch_size,
                            shuffle=True,
                            last_batch='discard',
                            num_workers=num_workers)
    logging.info("Loading image folder %s, this may take a bit long...",
                 val_dir)
    val_dataset = ImageFolderDataset(val_dir, transform=val_transform)
    val_data = DataLoader(val_dataset,
                          batch_size,
                          last_batch='keep',
                          num_workers=num_workers)
    return DataLoaderIter(train_data, dtype), DataLoaderIter(val_data, dtype)
コード例 #2
0
def get_caltech101_iterator(batch_size, num_workers, dtype):
    """Build train/test ``DataLoaderIter`` pairs for Caltech-101.

    Parameters
    ----------
    batch_size : int
        Number of images per batch.
    num_workers : int
        Worker processes used by each ``DataLoader``.
    dtype : str
        Data type the iterators cast batches to.

    Returns
    -------
    tuple
        ``(train_iter, test_iter)`` as ``DataLoaderIter`` objects.
    """
    def transform(image, label):
        # resize the shorter edge to 224, the longer edge will be greater or equal to 224
        resized = mx.image.resize_short(image, 224)
        # center and crop an area of size (224,224); crop geometry is unused
        cropped, _ = mx.image.center_crop(resized, (224, 224))
        # transpose the channels to be (3,224,224)
        transposed = mx.nd.transpose(cropped, (2, 0, 1))
        return transposed, label

    training_path, testing_path = get_caltech101_data()
    dataset_train = ImageFolderDataset(root=training_path, transform=transform)
    dataset_test = ImageFolderDataset(root=testing_path, transform=transform)

    train_data = DataLoader(dataset_train, batch_size, shuffle=True, num_workers=num_workers)
    test_data = DataLoader(dataset_test, batch_size, shuffle=False, num_workers=num_workers)
    # BUG FIX: dtype was accepted but never used; forward it to DataLoaderIter
    # as the ImageNet iterator in this file does.
    return DataLoaderIter(train_data, dtype), DataLoaderIter(test_data, dtype)
コード例 #3
0
def get_train_val_dataset(
        mixup=False, root='../CIFAR10/data/kaggle_cifar10/train_valid_test/'):
    """Build train and validation ``ImageFolderDataset`` objects.

    :param mixup: bool
        When True, train on the combined 'train_valid' split (used for
        mixup-style training); otherwise use the plain 'train' split.
    :param root: str
        Root path of the split data.
    :return: tuple
        ``(train_dataset, valid_dataset)``.
    """
    train_filename = 'train_valid' if mixup else 'train'
    valid_filename = 'valid'
    # BUG FIX: pass path components as separate arguments to os.path.join.
    # The original `os.path.join(root + name)` was plain string concatenation
    # and silently relied on `root` ending with a slash.
    train_filepath = os.path.join(root, train_filename)
    valid_filepath = os.path.join(root, valid_filename)
    train_dataset = ImageFolderDataset(root=train_filepath)
    valid_dataset = ImageFolderDataset(root=valid_filepath)
    return train_dataset, valid_dataset
        img1 = img1.reshape((1, 112, 92))
            
        if self.should_invert:
            img0 = PIL.ImageOps.invert(img0)
            img1 = PIL.ImageOps.invert(img1)

        if self.transform:
            img0 = mx.image.imresize(img0, 100, 100)
            img1 = mx.image.imresize(img1, 100, 100)
        return img0, img1 , nd.array([int(img1_tuple[1]!=img0_tuple[1])],dtype=np.float32)
    
    def __len__(self):
        """Delegate the pair count to the wrapped image-folder dataset."""
        wrapped = self.imageFolderDataset
        return len(wrapped)
    
    
# Wrap the raw image-folder dataset in the pair-producing Siamese dataset
# (no resize transform, no channel inversion).
folder_dataset = ImageFolderDataset(root=Config.training_dir)
siamese_dataset = SiameseNetworkDataset(
    imageFolderDataset=folder_dataset,
    should_invert=False,
    transform=False,
)

class SiameseNetwork(nn.Block):  
    def __init__(self, verbose=False,**kwargs):
        super(SiameseNetwork, self).__init__(**kwargs)
        self.verbose = verbose
        # add name_scope on the outer most Sequential
        with self.name_scope():
            # block 1
            b1 = nn.Sequential()
            b1.add(
                nn.Conv2D(4, kernel_size=3, padding=1, activation='relu'),
                nn.BatchNorm(),
                nn.Dropout(0.2)
コード例 #5
0
    num_workers = 12
    num_gpus = 2
    ctx = [mx.gpu(i) for i in range(num_gpus)]

    net = ResidualAttentionModel()
    net.hybridize(static_alloc=True)
    net.initialize(init=mx.init.MSRAPrelu(), ctx=ctx)

    trainer = gluon.Trainer(net.collect_params(), 'nag', {
        'learning_rate': lr,
        'momentum': 0.9,
        'wd': wd
    })

    train_data = gluon.data.DataLoader(ImageFolderDataset(
        '/system1/Dataset/ImageNet/ILSVRC2012_img_train',
        transform=transformer),
                                       batch_size=batch_size,
                                       shuffle=True,
                                       num_workers=num_workers,
                                       last_batch='discard')

    val_data = gluon.data.DataLoader(ImageFolderDataset(
        '/system1/Dataset/ImageNet/test', transform=trans_test),
                                     batch_size=batch_size,
                                     num_workers=num_workers)

    train(train_net=net,
          iterations=iterations,
          trainer=trainer,
          ctx=ctx,
コード例 #6
0
    # logging params
    logging.basicConfig(
        level=logging.INFO,
        handlers=[
            logging.StreamHandler(),
            logging.FileHandler('./logs/%s_shape_331_dataset_mean_std.log' %
                                pretrianed_model_name)
        ])
    logging.info(
        '100 classes, RandomCropAug = 331, rand_gray = 0.3 , ColorJitterAug(0.3, 0.3, 0.3), dataset_mean_std.'
    )
    model_weight_save_path = './weights/%s_dense_512_100_crop_331_gray_0.3_colorJitterAug_0.3_dataset_mean_std.params' % pretrianed_model_name

    # 定义训练集的 DataLoader
    train_data = DataLoader(ImageFolderDataset(train_path,
                                               transform=transform_train),
                            batch_size=batch_size,
                            shuffle=True,
                            num_workers=4)

    # 定义验证集的 DataLoader
    val_data = DataLoader(ImageFolderDataset(val_path,
                                             transform=transform_val),
                          batch_size=batch_size,
                          shuffle=False,
                          num_workers=4)

    # 定义网络
    net = Model(out_classes=classes,
                pretrained_model_name=pretrianed_model_name,
                pretrained=pretrained,
コード例 #7
0
# Fix the RNG seed so runs are reproducible.
opt.manualSeed = 10  # random.randint(1, 10000) # fix seed
print("Random Seed: ", opt.manualSeed)
random.seed(opt.manualSeed)
mx.random.seed(opt.manualSeed)

# Select the compute context from the CLI flag.
if opt.cuda:
    context = mx.gpu(0)
else:
    context = mx.cpu()

# CONSISTENCY FIX: use the `transforms` alias for Resize like every other
# transform here, instead of spelling out gluon.data.vision.transforms.
if opt.dataset in ['imagenet', 'folder', 'lfw']:
    # Generic folder dataset: resize shorter edge (bicubic), center-crop,
    # then scale to [-1, 1] per RGB channel.
    dataset = ImageFolderDataset(root=opt.dataroot).transform_first(transforms.Compose([
        transforms.Resize(opt.imageSize, keep_ratio=True, interpolation=3),
        transforms.CenterCrop(opt.imageSize),
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
    ]))
elif opt.dataset == 'cifar10':
    dataset = CIFAR10(root=opt.dataroot, train=True).transform_first(transforms.Compose([
        transforms.Resize(opt.imageSize, keep_ratio=True, interpolation=3),
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
    ]))
elif opt.dataset == 'mnist':
    dataset = MNIST(root=opt.dataroot, train=True).transform_first(transforms.Compose([
        transforms.Resize(opt.imageSize, keep_ratio=True, interpolation=3),
        transforms.ToTensor(),
        # MNIST is single-channel: scalar mean/std.  NOTE the original wrote
        # `(0.5)`, which is a scalar, NOT a one-element tuple — the value is
        # unchanged here, only the misleading parentheses are removed.
        transforms.Normalize(0.5, 0.5),
    ]))
コード例 #8
0
def get_test_dataset(root='../CIFAR10/data/kaggle_cifar10/'):
    """Load the Kaggle CIFAR-10 test split as an ``ImageFolderDataset``.

    :param root: str
        Base directory containing the ``test`` folder.
    :return: ImageFolderDataset
        Dataset over ``<root>/test``.
    """
    test_root = os.path.join(root, 'test')
    return ImageFolderDataset(root=test_root)