Exemplo n.º 1
0
    def __init__(self,
                 data_dir,
                 batch_size,
                 shuffle=True,
                 validation_split=0.0,
                 num_batches=0,
                 training=True,
                 num_workers=4,
                 pin_memory=True,
                 config=None,
                 teacher_idx=None,
                 seed=8888):
        """Build train/val loaders for the Clothing1M dataset.

        Args:
            data_dir: local dataset root (stored on the instance; the actual
                path handed to ``get_clothing1m`` comes from the config —
                NOTE(review): looks intentional, confirm against callers).
            batch_size: samples per batch.
            shuffle: whether the base loader shuffles the training set.
            validation_split: fraction split off for validation by the base
                loader.
            num_batches: caps the training subset at
                ``num_batches * batch_size`` samples (0 means no cap is
                requested here — semantics are decided by ``get_clothing1m``).
            training: build the training split when True.
            num_workers: worker processes for data loading.
            pin_memory: pin host memory for faster GPU transfer.
            config: optional ConfigParser; falls back to the global singleton.
            teacher_idx: optional subset indices selected by a teacher model.
            seed: RNG seed forwarded to the dataset builder.
        """
        self.batch_size = batch_size
        self.num_workers = num_workers
        self.num_batches = num_batches
        self.training = training

        # Resize-then-crop augmentation; the normalization stats are
        # Clothing1M-specific channel means/stds.
        self.transform_train = transforms.Compose([
            transforms.Resize(256),
            transforms.RandomCrop(224),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize((0.6959, 0.6537, 0.6371),
                                 (0.3113, 0.3192, 0.3214)),
        ])
        self.transform_val = transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            transforms.Normalize((0.6959, 0.6537, 0.6371),
                                 (0.3113, 0.3192, 0.3214)),
        ])

        self.data_dir = data_dir
        # Fixed: identity comparison with None (was `config == None`).
        if config is None:
            config = ConfigParser.get_instance()
        cfg_trainer = config['trainer']
        self.train_dataset, self.val_dataset = get_clothing1m(
            config['data_loader']['args']['data_dir'],
            cfg_trainer,
            num_samples=self.num_batches * self.batch_size,
            train=training,
            transform_train=self.transform_train,
            transform_val=self.transform_val,
            teacher_idx=teacher_idx,
            seed=seed)

        super().__init__(self.train_dataset,
                         batch_size,
                         shuffle,
                         validation_split,
                         num_workers,
                         pin_memory,
                         val_dataset=self.val_dataset)
Exemplo n.º 2
0
 def __init__(self, num_examp, num_classes=10, beta=0.3):
     """Initialize a per-example soft-target buffer.

     Args:
         num_examp: number of training examples; one target row each.
         num_classes: width of each target row.
         beta: stored momentum/mixing coefficient — presumably used by an
             update step elsewhere; confirm against the class's other methods.
     """
     super().__init__()
     self.num_classes = num_classes
     self.config = ConfigParser.get_instance()
     # Kept as a public attribute since other methods may branch on it.
     self.USE_CUDA = torch.cuda.is_available()
     # Allocate once on the right device instead of duplicating the
     # torch.zeros call and paying a CPU-alloc + .cuda() copy.
     device = torch.device('cuda' if self.USE_CUDA else 'cpu')
     self.target = torch.zeros(num_examp, self.num_classes, device=device)
     self.beta = beta
Exemplo n.º 3
0
    def __init__(self,
                 data_dir,
                 batch_size,
                 shuffle=True,
                 validation_split=0.0,
                 num_batches=0,
                 training=True,
                 num_workers=4,
                 pin_memory=True,
                 num_class=50,
                 teacher_idx=None,
                 config=None):
        """Build train/val loaders for the (mini-)WebVision dataset.

        Args:
            data_dir: local dataset root (stored on the instance; the path
                given to ``get_webvision`` comes from the config —
                NOTE(review): looks intentional, confirm against callers).
            batch_size: samples per batch.
            shuffle: whether the base loader shuffles the training set.
            validation_split: fraction split off for validation by the base
                loader.
            num_batches: caps the training subset at
                ``num_batches * batch_size`` samples.
            training: build the training split when True.
            num_workers: worker processes for data loading.
            pin_memory: pin host memory for faster GPU transfer.
            num_class: number of WebVision classes to keep.
            teacher_idx: optional subset indices selected by a teacher model.
            config: optional ConfigParser; falls back to the global singleton
                (added for consistency with the sibling loaders; default
                preserves the previous behavior).
        """
        self.batch_size = batch_size
        self.num_workers = num_workers
        self.num_batches = num_batches
        self.training = training

        # 227x227 crops with standard ImageNet normalization stats.
        self.transform_train = transforms.Compose([
            transforms.RandomCrop(227),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
        ])
        self.transform_val = transforms.Compose([
            transforms.CenterCrop(227),
            transforms.ToTensor(),
            transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
        ])
        # Eval transform for ImageNet-side validation (kept on the instance
        # for use elsewhere; not forwarded to get_webvision here).
        self.transform_imagenet = transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(227),
            transforms.ToTensor(),
            transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
        ])

        self.data_dir = data_dir
        if config is None:
            config = ConfigParser.get_instance()
        cfg_trainer = config['trainer']
        self.train_dataset, self.val_dataset = get_webvision(
            config['data_loader']['args']['data_dir'],
            cfg_trainer,
            num_samples=self.num_batches * self.batch_size,
            train=training,
            transform_train=self.transform_train,
            transform_val=self.transform_val,
            num_class=num_class,
            teacher_idx=teacher_idx)

        super().__init__(self.train_dataset,
                         batch_size,
                         shuffle,
                         validation_split,
                         num_workers,
                         pin_memory,
                         val_dataset=self.val_dataset)
Exemplo n.º 4
0
    def __init__(self,
                 data_dir,
                 batch_size,
                 shuffle=True,
                 validation_split=0.0,
                 num_batches=0,
                 training=True,
                 num_workers=4,
                 pin_memory=True,
                 config=None,
                 teacher_idx=None,
                 seed=888):
        """Build train/val loaders for CIFAR-100 with synthetic label noise.

        Args:
            data_dir: local dataset root (stored on the instance; the path
                given to ``get_cifar100`` comes from the config —
                NOTE(review): looks intentional, confirm against callers).
            batch_size: samples per batch.
            shuffle: whether the base loader shuffles the training set.
            validation_split: fraction split off for validation by the base
                loader.
            num_batches: accepted for signature parity with the sibling
                loaders; not used here.
            training: build the training split when True.
            num_workers: worker processes for data loading.
            pin_memory: pin host memory for faster GPU transfer.
            config: optional ConfigParser; falls back to the global singleton.
            teacher_idx: optional subset indices selected by a teacher model.
            seed: RNG seed forwarded to the dataset builder.
        """
        if config is None:
            config = ConfigParser.get_instance()
        cfg_trainer = config['trainer']

        # Standard CIFAR augmentation with CIFAR-100 channel means/stds.
        transform_train = transforms.Compose([
            transforms.RandomCrop(32, padding=4),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize((0.5071, 0.4867, 0.4408),
                                 (0.2675, 0.2565, 0.2761)),
        ])
        transform_val = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.5071, 0.4867, 0.4408),
                                 (0.2675, 0.2565, 0.2761)),
        ])
        self.data_dir = data_dir

        # Noise file is keyed by noise rate and symmetry flag so different
        # noise settings cache to different files.
        cfg_data_dir = config['data_loader']['args']['data_dir']
        noise_file = (f"{cfg_data_dir}CIFAR100_{cfg_trainer['percent']:.1f}"
                      f"_Asym_{cfg_trainer['asym']}.json")

        self.train_dataset, self.val_dataset = get_cifar100(
            cfg_data_dir,
            cfg_trainer,
            train=training,
            transform_train=transform_train,
            transform_val=transform_val,
            noise_file=noise_file,
            teacher_idx=teacher_idx,
            seed=seed)

        super().__init__(self.train_dataset,
                         batch_size,
                         shuffle,
                         validation_split,
                         num_workers,
                         pin_memory,
                         val_dataset=self.val_dataset)
Exemplo n.º 5
0
    def __init__(self,
                 data_dir,
                 batch_size,
                 shuffle=True,
                 validation_split=0.0,
                 num_batches=0,
                 training=True,
                 num_workers=4,
                 pin_memory=True,
                 config=None,
                 teacher_idx=None,
                 seed=888):
        """Build train/val loaders for CIFAR-10 with synthetic label noise.

        Args:
            data_dir: local dataset root (stored on the instance; the path
                given to ``get_cifar10`` comes from the config —
                NOTE(review): looks intentional, confirm against callers).
            batch_size: samples per batch.
            shuffle: whether the base loader shuffles the training set.
            validation_split: fraction split off for validation by the base
                loader.
            num_batches: accepted for signature parity with the sibling
                loaders; not used here.
            training: build the training split when True.
            num_workers: worker processes for data loading.
            pin_memory: pin host memory for faster GPU transfer.
            config: optional ConfigParser; falls back to the global singleton.
            teacher_idx: optional subset indices selected by a teacher model.
            seed: RNG seed forwarded to the dataset builder.
        """
        # Fixed: identity comparison with None (was `config == None`).
        if config is None:
            config = ConfigParser.get_instance()
        cfg_trainer = config['trainer']

        # Standard CIFAR augmentation with CIFAR-10 channel means/stds.
        transform_train = transforms.Compose([
            transforms.RandomCrop(32, padding=4),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465),
                                 (0.2023, 0.1994, 0.2010)),
        ])
        transform_val = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465),
                                 (0.2023, 0.1994, 0.2010)),
        ])
        self.data_dir = data_dir

        # Noise file is keyed by noise rate and symmetry flag so different
        # noise settings cache to different files.
        noise_file = '%sCIFAR10_%.1f_Asym_%s.json' % (
            config['data_loader']['args']['data_dir'], cfg_trainer['percent'],
            cfg_trainer['asym'])

        self.train_dataset, self.val_dataset = get_cifar10(
            config['data_loader']['args']['data_dir'],
            cfg_trainer,
            train=training,
            transform_train=transform_train,
            transform_val=transform_val,
            noise_file=noise_file,
            teacher_idx=teacher_idx,
            seed=seed)

        super().__init__(self.train_dataset,
                         batch_size,
                         shuffle,
                         validation_split,
                         num_workers,
                         pin_memory,
                         val_dataset=self.val_dataset)