Example #1
    # print an epoch banner to the results file F_txt (banner text is assumed)
    print('===================================== Epoch %d ====================================='
          % epoch_item,
          file=F_txt)
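    # adjust_learning_rate() is defined elsewhere in this script; a typical
    # implementation is a step-decay schedule along these lines (the decay
    # factor and interval below are assumptions, not taken from this excerpt):
    #
    #   def adjust_learning_rate(optimizer, epoch_item):
    #       lr = opt.lr * (0.1 ** (epoch_item // 10))
    #       for param_group in optimizer.param_groups:
    #           param_group['lr'] = lr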
    adjust_learning_rate(optimizer, epoch_item)

    # ======================================= Folder of Datasets =======================================
    # image transform & normalization
    ImgTransform = transforms.Compose([
        transforms.Resize((opt.imageSize, opt.imageSize)),
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),  # map [0, 1] to [-1, 1]
    ])

    trainset = Imagefolder_csv(data_dir=opt.dataset_dir,
                               mode=opt.mode,
                               image_size=opt.imageSize,
                               transform=ImgTransform,
                               episode_num=opt.episode_train_num,
                               way_num=opt.way_num,
                               shot_num=opt.shot_num,
                               query_num=opt.query_num)
    valset = Imagefolder_csv(data_dir=opt.dataset_dir,
                             mode='val',
                             image_size=opt.imageSize,
                             transform=ImgTransform,
                             episode_num=opt.episode_val_num,
                             way_num=opt.way_num,
                             shot_num=opt.shot_num,
                             query_num=opt.query_num)
    testset = Imagefolder_csv(data_dir=opt.dataset_dir,
                              mode='test',
                              image_size=opt.imageSize,
                              transform=ImgTransform,
                              episode_num=opt.episode_test_num,  # remaining args mirror the val split; option name assumed
                              way_num=opt.way_num,
                              shot_num=opt.shot_num,
                              query_num=opt.query_num)
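    # ================================== Load Datasets ==================================
    # The excerpt of this example stops here; the datasets would then be wrapped in
    # episodic DataLoaders, mirroring the test_loader built in Example #2 below.
    # opt.episodeSize is an assumed option name, not confirmed by this excerpt.
    train_loader = torch.utils.data.DataLoader(trainset,
                                               batch_size=opt.episodeSize,
                                               shuffle=True,
                                               num_workers=int(opt.workers),
                                               drop_last=True,
                                               pin_memory=True)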
Example #2

total_h = np.zeros(repeat_num)
for r in range(repeat_num):

    # =================== Folder of Datasets =====================

    # image transform & normalization
    ImgTransform = transforms.Compose([
        transforms.Resize((opt.imageSize, opt.imageSize)),
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),  # map [0, 1] to [-1, 1]
    ])

    testset = Imagefolder_csv(data_dir=opt.dataset_dir,
                              mode=opt.mode,
                              image_size=opt.imageSize,
                              transform=ImgTransform,
                              episode_num=opt.episode_num,
                              way_num=opt.way_num,
                              shot_num=opt.shot_num,
                              query_num=opt.query_num)
    print('.........The %d-th round.........' % r)
    print('.........The %d-th round.........' % r, file=F_txt)
    print('Testset: %d-------------%d' % (len(testset), r), file=F_txt)

    # ===================== Load Datasets =======================
    test_loader = torch.utils.data.DataLoader(testset,
                                              batch_size=opt.testepisodeSize,
                                              shuffle=True,
                                              num_workers=int(opt.workers),
                                              drop_last=True,
                                              pin_memory=True)
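
total_h holds one value per repeated round; in few-shot evaluation this is typically the 95% confidence half-width of the per-episode accuracies measured in that round. A minimal sketch of how such a half-width can be computed (the helper name, the scipy usage, and the episode_accuracies list are assumptions, not taken from this code):

import numpy as np
import scipy.stats

def mean_confidence_interval(accuracies, confidence=0.95):
    """Return the mean accuracy and the half-width of its confidence interval."""
    a = np.asarray(accuracies, dtype=np.float64)
    mean = a.mean()
    se = scipy.stats.sem(a)                                         # standard error of the mean
    h = se * scipy.stats.t.ppf((1 + confidence) / 2.0, len(a) - 1)  # t-distribution half-width
    return mean, h

# Usage inside the round loop, once episode_accuracies has been collected:
#     _, total_h[r] = mean_confidence_interval(episode_accuracies)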