Ejemplo n.º 1
0
                    args.num_classes)  # 数据越多 meta_epoch 增长越快
args.note = """
1.system sample 从 D_L 取 meta_dataset
2.每个 meta_epoch 后,finetune 3 epochs;或者直接从 D_L 取更多数据;去掉 finetune 阶段
3.重回 unlabel_dataloader 全局 shuffle, global_total_uc 增加趋势差不多 16335/60
最终结果 91.13
"""

pprint(vars(args))

# cudnn.benchmark = True

# get datasets
random.seed(args.seed)  # fix the RNG so the initial label/unlabel split is reproducible
label_dataset, unlabel_dataset, _, test_dataset = get_imb_meta_test_datasets(
    args.dataset, args.num_classes, 0, args.imb_factor, args.split,
    args.ratio)  # CIFAR10
random.seed()  # release the fixed seed (re-seed from system entropy)

# imb_train/valid_meta/test
kwargs = {'num_workers': 4, 'pin_memory': True}

# draw label/unlabel loaders together so each step sees one combined batch
label_loader = DataLoader(label_dataset,
                          batch_size=args.batch_size,
                          shuffle=True,
                          **kwargs)
unlabel_loader = DataLoader(
    unlabel_dataset,  # DataLoader will cvt np to tensor
    batch_size=args.uc_batchsize,
    shuffle=True,
Ejemplo n.º 2
0
    default='exp',
    type=str,
    help='experiment tag to create tensorboard, model save dir name')

params = [
    '--dataset', 'cifar10', '--num_classes', '10', '--imb_factor', '1',
    '--num_meta', '0', '--tag', 'base'
]
args = parser.parse_args(params)  # parse the hard-coded experiment config
pprint(vars(args))

# fixed input sizes, so let cuDNN benchmark and pick the fastest kernels
cudnn.benchmark = True

# get datasets
random.seed(args.seed)  # fix the RNG so the dataset split is reproducible
train_dataset, _, test_dataset = get_imb_meta_test_datasets(
    args.dataset, args.num_classes, args.num_meta, args.imb_factor)

# imb_train/valid_meta/test
kwargs = {'num_workers': 4, 'pin_memory': True}
train_loader = DataLoader(train_dataset,
                          batch_size=args.batch_size,
                          drop_last=False,
                          shuffle=True,
                          **kwargs)
test_loader = DataLoader(test_dataset,
                         batch_size=args.batch_size,
                         shuffle=False,  # evaluation: keep deterministic order
                         **kwargs)
"""
baseline
- directly train on total cifar10 dataset