Code Example #1
File: data_helper.py Project: vdevmcitylp/JigenDG
def get_target_jigsaw_loader(args):
    img_transformer, tile_transformer = get_train_transformers(args)
    if args.stylized:
        name_train, _, labels_train, _ = get_split_dataset_info(
            join(dirname(__file__), 'txt_lists', 'Stylized' + args.dataset,
                 "{}_target".format(args.target),
                 '%s_train.txt' % args.target), 0)
    else:
        name_train, _, labels_train, _ = get_split_dataset_info(
            join(dirname(__file__), 'txt_lists', 'Vanilla' + args.dataset,
                 '%s_train.txt' % args.target), 0)
    dataset = JigsawDataset(name_train,
                            labels_train,
                            patches=False,
                            img_transformer=img_transformer,
                            tile_transformer=tile_transformer,
                            jig_classes=args.jigsaw_n_classes,
                            bias_whole_image=args.bias_whole_image,
                            grid_size=args.grid_size)
    loader = torch.utils.data.DataLoader(dataset,
                                         batch_size=args.batch_size,
                                         shuffle=True,
                                         num_workers=4,
                                         pin_memory=True,
                                         drop_last=True)
    return loader
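
A minimal usage sketch (not part of the project) showing how this loader might be invoked. Every `args` field below is inferred from the attributes the function reads; the values are placeholders, and any additional fields consumed by get_train_transformers(args) are omitted here.

from argparse import Namespace

# Hypothetical argument namespace; real runs would build this with argparse.
args = Namespace(
    stylized=False,        # take the 'Vanilla' txt_lists branch
    dataset="PACS",        # assumed folder suffix after 'Vanilla'/'Stylized'
    target="sketch",       # target domain, used to pick '<target>_train.txt'
    jigsaw_n_classes=30,
    bias_whole_image=0.9,
    grid_size=3,
    batch_size=128,
)

target_loader = get_target_jigsaw_loader(args)
for batch in target_loader:
    # Exact tuple layout depends on JigsawDataset.__getitem__
    # (typically image tensor, jigsaw-permutation label, class label).
    pass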
Code Example #2
File: data_helper.py Project: txsing/JigenDG
def get_train_dataloader(args, patches):
    dataset_list = args.source
    assert isinstance(dataset_list, list)
    train_datasets = []
    val_datasets = []
    img_transformer, tile_transformer = get_train_transformers(args)
    limit = args.limit_source
    for dname in dataset_list:
        name_train, name_val, labels_train, labels_val = get_split_dataset_info(
            join(dirname(__file__), 'txt_lists','%s_train.txt' % dname),
            args.val_size
        )
        train_dataset = JigsawDataset(name_train, labels_train, patches=patches,
                                      img_transformer=img_transformer,
                                      tile_transformer=tile_transformer,
                                      jig_classes=args.jigsaw_n_classes,
                                      bias_whole_image=args.bias_whole_image)
        if limit:
            train_dataset = Subset(train_dataset, limit)
        train_datasets.append(train_dataset)

        # Validation set: held out from the train split
        val_datasets.append(
            JigsawTestDataset(name_val, labels_val, img_transformer=get_val_transformer(args),
                              patches=patches, jig_classes=args.jigsaw_n_classes))

    train_dataset = ConcatDataset(train_datasets)
    val_dataset = ConcatDataset(val_datasets)
    train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True, num_workers=4, pin_memory=True, drop_last=True)
    val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=args.batch_size, shuffle=False, num_workers=4, pin_memory=True, drop_last=False)
    return train_loader, val_loader
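
Note that `Subset(train_dataset, limit)` is passed a sample count, so it is presumably a project-local wrapper rather than the stock `torch.utils.data.Subset`, which expects an index sequence. Below is a hedged sketch of an equivalent cap built on the standard class; the random-sampling policy is an assumption, not necessarily what the project's wrapper does.

import random
from torch.utils.data import Subset as TorchSubset

def limit_dataset(dataset, limit, seed=0):
    # Cap `dataset` at `limit` randomly chosen samples; return it unchanged if no cap applies.
    if not limit or limit >= len(dataset):
        return dataset
    indices = random.Random(seed).sample(range(len(dataset)), limit)
    return TorchSubset(dataset, indices)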
Code Example #3
def get_train_dataloader(args):
    dataset_list = args.source
    assert isinstance(dataset_list, list)
    datasets = []
    val_datasets = []
    img_transformer = get_train_transformers(args)
    limit = args.limit_source

    for dname in dataset_list:
        if dname in digits_datasets:
            return get_digital_train_dataloader(args, dname)
        name_train, name_val, labels_train, labels_val = get_split_dataset_info(
            join(dirname(__file__), 'txt_lists', '%s_train.txt' % dname),
            args.val_size)
        train_dataset = JigsawDataset(name_train,
                                      labels_train,
                                      img_transformer=img_transformer)
        if limit:
            train_dataset = Subset(train_dataset, limit)
        datasets.append(train_dataset)
        val_datasets.append(
            JigsawDataset(name_val,
                          labels_val,
                          img_transformer=get_val_transformer(args)))
    dataset = ConcatDataset(datasets)
    val_dataset = ConcatDataset(val_datasets)
    loader = torch.utils.data.DataLoader(dataset,
                                         batch_size=args.batch_size,
                                         shuffle=True,
                                         num_workers=4,
                                         pin_memory=True,
                                         drop_last=True)
    val_loader = torch.utils.data.DataLoader(val_dataset,
                                             batch_size=args.batch_size,
                                             shuffle=False,
                                             num_workers=4,
                                             pin_memory=True,
                                             drop_last=False)
    return loader, val_loader
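
A hedged sketch of how the returned pair might be consumed. The `args` fields are the ones read directly in the function; the values are placeholders, and fields needed by get_train_transformers(args) / get_val_transformer(args) are omitted. Also note that the digits check returns from inside the loop, so a source list mixing digits and non-digits datasets would stop at the first digits entry.

from argparse import Namespace

args = Namespace(
    source=["photo", "cartoon"],  # placeholder source domain names (no digits datasets)
    val_size=0.1,                 # fraction of each source held out for validation
    limit_source=None,            # no per-source sample cap
    batch_size=64,
)

loader, val_loader = get_train_dataloader(args)
for batch in loader:
    pass  # training step on the concatenated, shuffled source data
for batch in val_loader:
    pass  # evaluation step on the concatenated validation splits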
Code Example #4
File: data_helper.py Project: vdevmcitylp/JigenDG
def get_train_dataloader(args, patches):
    dataset_list = args.source
    assert isinstance(dataset_list, list)
    datasets = []
    val_datasets = []
    img_transformer, tile_transformer = get_train_transformers(args)
    limit = args.limit_source
    for dname in dataset_list:
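        # Pick the stylized-image file lists when requested, otherwise the vanilla ones.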
        if args.stylized:
            name_train, name_val, labels_train, labels_val = get_split_dataset_info(
                join(dirname(__file__), 'txt_lists', 'Stylized' + args.dataset,
                     "{}_target".format(args.target), '%s_train.txt' % dname),
                args.val_size)
            # print(name_train)

        else:
            name_train, name_val, labels_train, labels_val = get_split_dataset_info(
                join(dirname(__file__), 'txt_lists', 'Vanilla' + args.dataset,
                     '%s_train.txt' % dname), args.val_size)

        train_dataset = JigsawDataset(name_train,
                                      labels_train,
                                      patches=patches,
                                      img_transformer=img_transformer,
                                      tile_transformer=tile_transformer,
                                      jig_classes=args.jigsaw_n_classes,
                                      bias_whole_image=args.bias_whole_image,
                                      grid_size=args.grid_size)

        if limit:
            train_dataset = Subset(train_dataset, limit)
        datasets.append(train_dataset)
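        # args.jig_only presumably validates on the jigsaw task itself; otherwise
        # JigsawTestDataset is used for standard classification-style validation.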
        if args.jig_only:
            val_datasets.append(
                JigsawDataset(name_val,
                              labels_val,
                              patches=patches,
                              img_transformer=img_transformer,
                              tile_transformer=tile_transformer,
                              jig_classes=args.jigsaw_n_classes,
                              bias_whole_image=args.bias_whole_image,
                              grid_size=args.grid_size))
        else:
            val_datasets.append(
                JigsawTestDataset(name_val,
                                  labels_val,
                                  img_transformer=get_val_transformer(args),
                                  patches=patches,
                                  jig_classes=args.jigsaw_n_classes))

    dataset = ConcatDataset(datasets)
    val_dataset = ConcatDataset(val_datasets)
    loader = torch.utils.data.DataLoader(dataset,
                                         batch_size=args.batch_size,
                                         shuffle=True,
                                         num_workers=4,
                                         pin_memory=True,
                                         drop_last=True)
    val_loader = torch.utils.data.DataLoader(val_dataset,
                                             batch_size=args.batch_size,
                                             shuffle=False,
                                             num_workers=4,
                                             pin_memory=True,
                                             drop_last=False)
    return loader, val_loader
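
For this fuller variant, a sketch of an `args` namespace exercising the stylized and jig_only switches. The field names come from the code above; the values are placeholders, and fields consumed only by the transformer helpers are again omitted.

from argparse import Namespace

args = Namespace(
    source=["art_painting", "cartoon", "photo"],  # placeholder source domains
    target="sketch",        # selects the '<target>_target' stylized subfolder
    dataset="PACS",         # suffix after 'Stylized'/'Vanilla' in txt_lists
    stylized=True,          # read stylized-image file lists
    jig_only=False,         # validate with JigsawTestDataset rather than JigsawDataset
    val_size=0.1,
    limit_source=None,
    jigsaw_n_classes=30,
    bias_whole_image=0.9,
    grid_size=3,
    batch_size=128,
)

train_loader, val_loader = get_train_dataloader(args, patches=False)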