Пример #1
0
# Boolean CLI flags; `booltype` (defined elsewhere in this file) converts the
# string flag value to bool, and `ap` is the ArgumentParser built above.
ap.add_argument("--shuffle", required=False, type=booltype, default=True)
ap.add_argument("--stratify", required=False, type=booltype, default=True)
ap.add_argument("--uniform", required=False, type=booltype, default=False)
ap.add_argument("--dry-run", required=False, type=booltype, default=False)

# Turn the parsed Namespace into a dict, then wrap in Struct for
# attribute-style access (args.dataset instead of args['dataset']).
args = vars(ap.parse_args())
args = Struct(**args)

# Per-dataset metadata looked up from the config module by dataset name.
num_train = cfg.num_trains[args.dataset]
num_test = cfg.num_tests[args.dataset]
num_classes = cfg.output_sizes[args.dataset]

kwargs = {}

# Loaders built with the full split size as the second positional argument —
# presumably the batch size, so each loader yields a single whole-split batch.
# TODO(review): confirm get_loader's second positional arg is batch_size.
train_loader = get_loader(args.dataset,
                          num_train,
                          train=True,
                          subset=args.repeat)
test_loader = get_loader(args.dataset,
                         num_test,
                         train=False,
                         subset=args.repeat)

# NOTE(review): these loops overwrite X_train/y_train on every iteration and
# keep only the LAST batch. That is correct only if each loader yields exactly
# one batch (see the batch-size note above) — verify against get_loader.
for data, target in train_loader:
    X_train = data
    y_train = target

for data, target in test_loader:
    X_test = data
    y_test = target

# def repeat_data(data, repeat):
Пример #2
0
# Datasets to smoke-test; commented entries are known but currently disabled.
datasets = [
    # 'celeba',  # regression
    # 'cifar',  # classification
    # 'coco', 'voc',  # semantic segmentation
    'fmnist',
    'mnist',
    'svhn'  # classification
]
# Only these datasets are additionally exercised through the subset=0.5 path.
test_subset = ['celeba', 'coco']

for name in datasets:
    for is_train in (True, False):
        print('-' * 80)
        print('dataset: {}, train:{}'.format(name, is_train))

        # Plain loader: report split size and the shape of one batch.
        dl = get_loader(name, batch_size=16, train=is_train, shuffle=True)
        print('\tdata_size:', len(dl.dataset))
        for xb, yb in dl:
            print('\tbatch_size:', xb.shape, yb.shape)
            break

        # Subset loader: same report, but over every batch (no early break).
        if name in test_subset:
            dl = get_loader(name,
                            batch_size=16,
                            train=is_train,
                            shuffle=True,
                            subset=0.5)
            print('\tdata_size:', len(dl.dataset))
            for xb, yb in dl:
                print('\tbatch_size:', xb.shape, yb.shape)
Пример #3
0
tb = SummaryWriter(paths.tb_path)

print('+' * 80)
print(paths.model_name)
print('+' * 80)

print(args.__dict__)
print('+' * 80)

# prepare graph and data
_, workers = get_fl_graph(hook, args.num_workers)
print('Loading data: {}'.format(paths.data_path))
# Fix: use a context manager so the pickle file handle is closed
# deterministically — the original `pkl.load(open(..., 'rb'))` leaked the
# handle until garbage collection.
with open(paths.data_path, 'rb') as f:
    X_trains, _, y_trains, _, meta = pkl.load(f)

test_loader = get_loader(args.dataset,
                         args.test_batch_size,
                         train=False,
                         noise=args.noise)

print('+' * 80)

# ------------------------------------------------------------------------------
# Fire the engines
# ------------------------------------------------------------------------------

model, loss_type = get_model(args, ckpt_path=args.load_model)
# batch_size == 0 is a sentinel meaning "use the batch size stored alongside
# the pickled training data" (meta comes from the file loaded above).
if args.batch_size == 0:
    args.batch_size = int(meta['batch_size'])
    print("Resetting batch size: {}...".format(args.batch_size))

print('+' * 80)
h_epoch = []  # per-epoch history accumulator, filled later
Пример #4
0
import argparse
from data.loader import get_loader


def get_parser():
    """Build the CLI argument parser for the data-loader driver script."""
    p = argparse.ArgumentParser()
    p.add_argument('--data_dir', type=str, required=True)
    p.add_argument('--batch_size', default=1, type=int)
    p.add_argument('--verbose', action="store_true")
    p.add_argument("--num_workers", default=8, type=int)
    return p


args = get_parser().parse_args()

loader = get_loader(args)
# Fix: removed leftover debugging residue — an `import pdb; pdb.set_trace()`
# pair that halted the script on every run, plus a commented-out
# `iter(loader).next()` probe.
for src, tgt in loader:
    print(src[0])  #, tgt)
Пример #5
0
tb = SummaryWriter(paths.tb_path)

print('+' * 80)
print(paths.model_name)
print('+' * 80)

print(args.__dict__)
print('+' * 80)

# batch_size == 0 is a sentinel for full-batch training: use the whole
# training split as a single batch.
if args.batch_size == 0:
    args.batch_size = args.num_train
    print("Resetting batch size: {}...".format(args.batch_size))

# Both loaders share the subset fraction and the classifier's input size.
shared = dict(subset=args.repeat,
              force_resize=cfg.model_im_size[args.clf])
train_loader = get_loader(args.dataset,
                          args.batch_size,
                          train=True,
                          **shared)
test_loader = get_loader(args.dataset,
                         args.test_batch_size,
                         train=False,
                         shuffle=False,
                         **shared)
print('Train size: ', len(train_loader.dataset))
print('Test size: ', len(test_loader.dataset))

print('+' * 80)

# ------------------------------------------------------------------------------
# Fire the engines