Example #1
def test_loader():
    # parse the standard neon command-line arguments (data_dir, backend, ...)
    parser = NeonArgparser(__doc__)
    args = parser.parse_args()

    # write image macrobatches for the train and validation sets
    train_dir = os.path.join(args.data_dir, 'macrotrain')
    test_dir = os.path.join(args.data_dir, 'macrotest')
    write_batches(args, train_dir, trainimgs, 0)
    write_batches(args, test_dir, testimgs, 1)

    # build loaders over the written batches, with augmentation disabled
    train = ImageLoader(set_name='train', do_transforms=False, inner_size=32,
                        repo_dir=train_dir)
    test = ImageLoader(set_name='validation', do_transforms=False, inner_size=32,
                       repo_dir=test_dir)

    # start the batch providers, run the check, then shut them down cleanly
    train.init_batch_provider()
    test.init_batch_provider()
    err = run(args, train, test)
    test.exit_batch_provider()
    train.exit_batch_provider()
    return err
Example #2
def test_loader():
    parser = NeonArgparser(__doc__)
    args = parser.parse_args()

    train_dir = os.path.join(args.data_dir, 'macrotrain')
    test_dir = os.path.join(args.data_dir, 'macrotest')
    write_batches(args, train_dir, trainimgs, 0)
    write_batches(args, test_dir, testimgs, 1)
    train = ImageLoader(set_name='train',
                        do_transforms=False,
                        inner_size=32,
                        repo_dir=train_dir)
    test = ImageLoader(set_name='validation',
                       do_transforms=False,
                       inner_size=32,
                       repo_dir=test_dir)
    train.init_batch_provider()
    test.init_batch_provider()
    err = run(args, train, test)
    test.exit_batch_provider()
    train.exit_batch_provider()
    return err
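
Both loader examples above are extracted from a larger test module: write_batches, run, trainimgs, and testimgs are helpers and data defined elsewhere in that module, so they act as placeholders here. A minimal sketch of the surrounding imports, assuming the neon 1.x module layout, would look roughly like this:

# Sketch only: module paths assume neon 1.x; write_batches/run/trainimgs/testimgs
# come from the enclosing test module and are not defined here.
import os

from neon.util.argparser import NeonArgparser
from neon.data import ImageLoader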
Example #3
# (this snippet begins mid-call; the earlier arguments of the call are not shown)
                      serialize=1, history=3, save_path='serialize_test.pkl')

lr_sched = PolySchedule(total_epochs=10, power=0.5)
opt_gdm = GradientDescentMomentum(0.01, 0.9, wdecay=0.0002, schedule=lr_sched)
opt_biases = GradientDescentMomentum(0.02, 0.9, schedule=lr_sched)

# MultiOptimizer maps layer class names to optimizers: 'Bias' layers use
# opt_biases, every other layer falls back to the 'default' entry (opt_gdm)
opt = MultiOptimizer({'default': opt_gdm, 'Bias': opt_biases})
if not args.resume:
    # fit the model for 3 epochs
    model.fit(train, optimizer=opt, num_epochs=3, cost=cost, callbacks=callbacks)

train.reset()
# get one minibatch of images (im) and labels (l)
for im, l in train:
    break
train.exit_batch_provider()
with open('im1.pkl', 'wb') as fid:  # pickle needs a binary-mode file handle
    pickle.dump((im.get(), l.get()), fid)
im_save = im.get().copy()
if args.resume:
    with open('im1.pkl', 'rb') as fid:
        (im2, l2) = pickle.load(fid)
    im.set(im2)
    l.set(l2)

# run fprop and bprop on this minibatch and save the results
out_fprop = model.fprop(im)
out_fprop_save = [x.get() for x in out_fprop]
im.set(im_save)
out_fprop = model.fprop(im)
out_fprop_save2 = [x.get() for x in out_fprop]
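
Examples #3 and #4 are two variants of the same serialization check: the first persists the minibatch with raw pickle, the second with neon's save_obj/load_obj helpers. Both assume a model, cost, callbacks object, and train loader built earlier in the original script, plus the optimizer and schedule classes shown. A minimal sketch of the imports the snippets rely on, assuming the neon 1.x module layout:

# Sketch only: assumes neon 1.x module paths; model/cost/callbacks/train/args
# are constructed earlier in the original script and are not defined here.
import pickle

from neon.optimizers import GradientDescentMomentum, MultiOptimizer, PolySchedule
from neon.util.persist import save_obj, load_obj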
Example #4
opt_biases = GradientDescentMomentum(0.02, 0.9, schedule=lr_sched)

opt = MultiOptimizer({'default': opt_gdm, 'Bias': opt_biases})
if not args.resume:
    # fit the model for 3 epochs
    model.fit(train,
              optimizer=opt,
              num_epochs=3,
              cost=cost,
              callbacks=callbacks)

train.reset()
# get one minibatch of images (im) and labels (l)
for im, l in train:
    break
train.exit_batch_provider()
save_obj((im.get(), l.get()), 'im1.pkl')
im_save = im.get().copy()
if args.resume:
    (im2, l2) = load_obj('im1.pkl')
    im.set(im2)
    l.set(l2)

# run fprop and bprop on this minibatch and save the results
out_fprop = model.fprop(im)

out_fprop_save = [x.get() for x in out_fprop]
im.set(im_save)
out_fprop = model.fprop(im)
out_fprop_save2 = [x.get() for x in out_fprop]
for x, y in zip(out_fprop_save, out_fprop_save2):
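
Example #4 is truncated at the comparison loop, so its body is not shown. As a sketch only, not taken from the original test, the usual way to finish such a check is an elementwise comparison of the two saved fprop outputs with NumPy:

# Sketch only: the assertion below is illustrative, not the original test body.
import numpy as np

for x, y in zip(out_fprop_save, out_fprop_save2):
    # both forward passes saw the same input, so each layer's output should match
    assert np.allclose(x, y), "fprop outputs differ between the two passes"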