# Example #1
def get_data(dataset, data_path, cutout_length, validation):
    """Return dataset metadata and dataset objects for the requested dataset.

    Args:
        dataset: dataset name, case-insensitive: 'cifar10', 'cifar100',
            'imagenet', 'mnist' or 'fashionmnist'.
        data_path: root directory for the torchvision datasets. NOTE(review):
            ignored for 'imagenet', which uses hard-coded paths below.
        cutout_length: cutout augmentation length, forwarded to
            preproc.data_transforms.
        validation: when True, append the validation dataset to the result.

    Returns:
        [input_size, input_channels, n_classes, trn_data] plus, when
        `validation` is True, the validation dataset appended at the end.

    Raises:
        ValueError: for an unknown dataset name.
    """
    dataset = dataset.lower()

    if dataset == 'cifar10':
        dset_cls = dset.CIFAR10
        n_classes = 10
    elif dataset == 'cifar100':
        dset_cls = dset.CIFAR100
        n_classes = 100
    elif dataset == 'imagenet':
        # imagenet uses ImageFolder directly; no dset_cls needed here.
        n_classes = 200
    elif dataset == 'mnist':
        dset_cls = dset.MNIST
        n_classes = 10
    elif dataset == 'fashionmnist':
        dset_cls = dset.FashionMNIST
        n_classes = 10
    else:
        raise ValueError(dataset)

    if dataset == "imagenet":
        trn_transform, val_transform = preproc.data_transforms(
            dataset, cutout_length)
        # NOTE(review): hard-coded machine-specific path; `data_path` is unused.
        trn_data = dset.ImageFolder(
            root="/home/LAB/gaoch/asdf/data/imagenet_new/train",
            transform=trn_transform)
        input_channels = 3
        input_size = trn_data[0][0].shape[1]
        ret = [input_size, input_channels, n_classes, trn_data]
        if validation:
            # BUG FIX: the original wrapped this path in
            # os.path.join(data_path, "/home/.../valid"); joining with an
            # absolute second argument discards data_path entirely, so the
            # result was exactly this absolute path.  Use it directly so the
            # (unchanged) behavior is no longer hidden behind a no-op join.
            val_data = dset.ImageFolder(
                root="/home/LAB/gaoch/asdf/data/imagenet_new/valid",
                transform=val_transform)
            ret.append(val_data)
        return ret

    trn_transform, val_transform = preproc.data_transforms(
        dataset, cutout_length)
    trn_data = dset_cls(root=data_path,
                        train=True,
                        download=True,
                        transform=trn_transform)

    # assuming shape is NHW or NHWC
    shape = trn_data.data.shape
    input_channels = 3 if len(shape) == 4 else 1
    assert shape[1] == shape[2], "not expected shape = {}".format(shape)
    input_size = shape[1]

    ret = [input_size, input_channels, n_classes, trn_data]
    if validation:  # append validation data
        ret.append(
            dset_cls(root=data_path,
                     train=False,
                     download=True,
                     transform=val_transform))

    return ret
# Example #2
def get_data(dataset, data_path, cutout_length, validation):
    """Build a torchvision dataset plus its input metadata.

    Returns [input_size, input_channels, n_classes, trn_data]; when
    `validation` is true, the test split is appended at the end.
    Raises ValueError for an unknown dataset name.
    """
    key = dataset.lower()

    # Supported datasets: constructor class and class count.
    supported = {
        'cifar10': (dset.CIFAR10, 10),
        'mnist': (dset.MNIST, 10),
        'fashionmnist': (dset.FashionMNIST, 10),
    }
    if key not in supported:
        raise ValueError(key)
    dset_cls, n_classes = supported[key]

    trn_transform, val_transform = preproc.data_transforms(key, cutout_length)
    trn_data = dset_cls(root=data_path, train=True, download=True,
                        transform=trn_transform)

    # Training array is NHW (grayscale) or NHWC (RGB).
    shape = trn_data.data.shape
    input_channels = 3 if len(shape) == 4 else 1
    assert shape[1] == shape[2], "not expected shape = {}".format(shape)
    input_size = shape[1]

    ret = [input_size, input_channels, n_classes, trn_data]
    if validation:
        # Append the held-out split built with the validation transform.
        ret.append(dset_cls(root=data_path, train=False, download=True,
                            transform=val_transform))

    return ret
# Example #3
def get_data(dataset, data_path, val1_data_path, val2_data_path,
             cutout_length, validation, validation2=False, n_class=3,
             image_size=64):
    """Build a training dataset (torchvision or custom ImageFolder) plus metadata.

    Args:
        dataset: 'cifar10', 'mnist', 'fashionmnist' or 'custom' (ImageFolder).
        data_path: root directory of the training data.
        val1_data_path: root of the first validation set (custom only).
        val2_data_path: root of the second validation set (custom only).
        cutout_length: cutout augmentation length for preproc.data_transforms.
        validation: when True, append the first validation dataset.
        validation2: when True, append the second validation dataset
            (only built for the 'custom' dataset).
        n_class: number of classes for the 'custom' dataset.
        image_size: side length of 'custom' images — assumed square RGB;
            TODO confirm against the actual image folder.

    Returns:
        [input_size, input_channels, n_classes, trn_data] with zero, one or
        two validation datasets appended.

    Raises:
        ValueError: for an unknown dataset name.
    """
    dataset = dataset.lower()

    if dataset == 'cifar10':
        dset_cls = dset.CIFAR10
        n_classes = 10
    elif dataset == 'mnist':
        dset_cls = dset.MNIST
        n_classes = 10
    elif dataset == 'fashionmnist':
        dset_cls = dset.FashionMNIST
        n_classes = 10
    elif dataset == 'custom':
        dset_cls = dset.ImageFolder
        n_classes = n_class  # 2 to mama
    else:
        raise ValueError(dataset)

    trn_transform, val_transform = preproc.data_transforms(
        dataset, cutout_length, image_size)
    if dataset == 'custom':
        print("DATA PATH:", data_path)
        # ImageFolder takes no train/download arguments.
        trn_data = dset_cls(root=data_path, transform=trn_transform)
    else:
        trn_data = dset_cls(root=data_path, train=True, download=True,
                            transform=trn_transform)

    # assuming shape is NHW or NHWC
    if dataset == 'custom':
        # ImageFolder exposes no array; assume square HWC RGB images.
        shape = [1, image_size, image_size, 3]
    else:
        # BUG FIX: `.train_data` was removed from modern torchvision datasets
        # in favor of `.data`; try the current attribute first and fall back
        # for old torchvision versions.
        try:
            shape = trn_data.data.shape
        except AttributeError:
            shape = trn_data.train_data.shape
    print(shape)
    input_channels = 3 if len(shape) == 4 else 1
    assert shape[1] == shape[2], "not expected shape = {}".format(shape)
    input_size = shape[1]
    print('input_size: uitls', input_size)

    ret = [input_size, input_channels, n_classes, trn_data]

    if validation:  # append validation data
        if dataset == 'custom':
            # BUG FIX: the original rebound `dset_cls` (a class) to a dataset
            # *instance* here; use a distinct local name instead.
            val1_data = dset.ImageFolder(val1_data_path,
                                         transform=val_transform)
            ret.append(val1_data)
        else:
            ret.append(dset_cls(root=data_path, train=False, download=True,
                                transform=val_transform))
    if validation2:
        if dataset == 'custom':
            # NOTE(review): uses the *training* transform, as the original did.
            val2_data = dset.ImageFolder(val2_data_path,
                                         transform=trn_transform)
            ret.append(val2_data)
    return ret
def get_data(dataset, data_path, cutout_length, validation):
    """Get a torchvision (or toy mock) dataset plus its input metadata.

    Also installs a browser-like User-Agent on urllib so the torchvision
    download endpoints do not reject the request.

    Args:
        dataset: 'cifar10', 'mnist', 'fashionmnist' or 'toy' (DsetMock).
        data_path: root directory passed to the dataset constructor.
        cutout_length: cutout augmentation length for preproc.data_transforms.
        validation: when True, append the validation dataset to the result.

    Returns:
        [input_size, input_channels, n_classes, trn_data(, val_data)]

    Raises:
        ValueError: for an unknown dataset name.
    """
    # NOTE: the docstring above was originally placed after these statements,
    # where it was a no-op string expression rather than a real docstring.
    opener = urllib.request.build_opener()
    opener.addheaders = [('User-agent', 'Mozilla/5.0')]
    urllib.request.install_opener(opener)
    dataset = dataset.lower()

    if dataset == 'cifar10':
        dset_cls = dset.CIFAR10
        n_classes = 10
    elif dataset == 'mnist':
        dset_cls = dset.MNIST
        n_classes = 10
    elif dataset == 'fashionmnist':
        dset_cls = dset.FashionMNIST
        n_classes = 10
    elif dataset == 'toy':
        dset_cls = DsetMock
        n_classes = 2
    else:
        raise ValueError(dataset)

    trn_transform, val_transform = preproc.data_transforms(
        dataset, cutout_length)
    trn_data = dset_cls(root=data_path,
                        train=True,
                        download=True,
                        transform=trn_transform)

    # assuming shape is NHW or NHWC
    # BUG FIX: the original used a bare `except:`, which would also swallow
    # KeyboardInterrupt/SystemExit; only the missing attribute is expected.
    try:
        shape = trn_data.train_data.shape
    except AttributeError:
        shape = trn_data.data.shape
    input_channels = 3 if len(shape) == 4 else 1
    input_size = shape[1]

    ret = [input_size, input_channels, n_classes, trn_data]
    if validation:  # append validation data
        ret.append(
            dset_cls(root=data_path,
                     train=False,
                     download=True,
                     transform=val_transform))

    return ret
# Example #5
def get_data(prop_mouse_data_to_use):
    """Build neuron train/validation datasets, promoting part of the
    validation pool into training.

    Args:
        prop_mouse_data_to_use: fraction (0..1) of the module-level
            `val_samples` to move into the training set.

    Returns:
        [metadata_dict, trn_data, val_data] where metadata_dict holds
        'input_size', 'input_channels' and 'num_classes' (fixed at 1).
    """
    train_general_transform, train_img_transform, norm_transform, val_transform = preproc.data_transforms()

    # Move the first `cut_point` validation samples into training.
    cut_point = round(prop_mouse_data_to_use * len(val_samples))
    new_train_samples = train_samples + val_samples[:cut_point]
    new_val_samples = val_samples[cut_point:]

    val_data = NeuronDataset(new_val_samples, transform_norm=val_transform)

    # BUG FIX: the original first built trn_data from `train_samples` and then
    # immediately rebuilt it from `new_train_samples`; the first construction
    # was a dead store and has been removed.
    trn_data = NeuronDataset(new_train_samples, transform_norm=norm_transform,
            transform_img=train_img_transform, transform_both=train_general_transform)

    # shape is HW (grayscale) or HWC (RGB)
    shape = trn_data.shape
    input_channels = 3 if len(shape) == 3 else 1
    assert shape[0] == shape[1], "not expected shape = {}".format(shape)
    input_size = shape[0]

    return [{'input_size': input_size, 'input_channels': input_channels, 'num_classes': 1},
            trn_data, val_data]
# Example #6
def get_data(dataset, data_root, cutout_length, validation, autoaugment):
    """Get a torchvision or ImageFolder dataset plus its input metadata.

    Returns [input_size, input_channels, n_classes, trn_data]; when
    `validation` is true, the held-out split is appended at the end.
    Raises ValueError for an unknown dataset name.
    """
    dataset = dataset.lower()
    data_path = data_root

    # Built-in torchvision datasets: constructor and class count.
    builtin = {
        'cifar10': (dset.CIFAR10, 10),
        'cifar100': (dset.CIFAR100, 100),
        'mnist': (dset.MNIST, 10),
        'fashionmnist': (dset.FashionMNIST, 10),
    }
    # ImageFolder datasets: on-disk folder name and class count.
    folders = {
        'mit67': ('MIT67', 67),
        'sport8': ('Sport8', 8),
        'caltech101': ('Caltech101', 101),
    }

    if dataset in builtin:
        dset_cls, n_classes = builtin[dataset]
    elif dataset in folders:
        folder, n_classes = folders[dataset]
        dset_cls = dset.ImageFolder
        data_path = '%s/%s/train' % (data_root, folder)
        val_path = '%s/%s/test' % (data_root, folder)
    else:
        raise ValueError(dataset)

    trn_transform, val_transform = preproc.data_transforms(
        dataset, cutout_length, autoaugment)

    if dataset in LARGE_DATASETS:
        # ImageFolder has no array attribute: probe one sample for its shape.
        trn_data = dset_cls(root=data_path, transform=trn_transform)
        shape = trn_data[0][0].unsqueeze(0).shape
        print(shape)
        assert shape[2] == shape[3], "not expected shape = {}".format(shape)
        input_size = shape[2]
    else:
        trn_data = dset_cls(root=data_path,
                            train=True,
                            download=True,
                            transform=trn_transform)
        # Training array is NHW (grayscale) or NHWC (RGB); older torchvision
        # exposes it as `train_data` instead of `data`.
        try:
            shape = trn_data.data.shape
        except AttributeError:
            shape = trn_data.train_data.shape
        assert shape[1] == shape[2], "not expected shape = {}".format(shape)
        input_size = shape[1]

    input_channels = 3 if len(shape) == 4 else 1

    ret = [input_size, input_channels, n_classes, trn_data]
    if validation:  # append validation data
        if dataset in LARGE_DATASETS:
            ret.append(dset_cls(root=val_path, transform=val_transform))
        else:
            ret.append(
                dset_cls(root=data_path,
                         train=False,
                         download=True,
                         transform=val_transform))

    return ret