Example #1
# NB: import paths assume the neon 1.x API these examples were written against
import logging

from neon.callbacks.callbacks import Callbacks
from neon.data import ImgMaster
from neon.layers import GeneralizedCost
from neon.optimizers import Adadelta
from neon.transforms import CrossEntropyMulti, TopKMisclassification
from neon.util.argparser import NeonArgparser


def main():
    # parse the command line arguments
    parser = NeonArgparser(__doc__)

    args = parser.parse_args()

    logger = logging.getLogger()
    logger.setLevel(args.log_thresh)

    # Set up batch iterators for the training, validation and test images
    train = ImgMaster(repo_dir='dataTmp_optFlow_BW',
                      set_name='train',
                      inner_size=240,
                      subset_pct=100)
    val = ImgMaster(repo_dir='dataTmp_optFlow_BW',
                    set_name='train',
                    inner_size=240,
                    subset_pct=100,
                    do_transforms=False)
    test = ImgMaster(repo_dir='dataTestTmp_optFlow_BW',
                     set_name='train',
                     inner_size=240,
                     subset_pct=100,
                     do_transforms=False)

    train.init_batch_provider()
    val.init_batch_provider()
    test.init_batch_provider()

    print "Constructing network..."
    #Create AlexNet architecture
    model = constuct_network()

    # Optimzer for model
    opt = Adadelta()

    # configure callbacks
    valmetric = TopKMisclassification(k=5)
    callbacks = Callbacks(model,
                          train,
                          eval_set=test,
                          metric=valmetric,
                          **args.callback_args)

    cost = GeneralizedCost(costfunc=CrossEntropyMulti())

    #flag = input("Press Enter if you want to begin training process.")
    print "Training network..."
    model.fit(train,
              optimizer=opt,
              num_epochs=args.epochs,
              cost=cost,
              callbacks=callbacks)
    mets = model.eval(test, metric=valmetric)

    print('Test set metrics:')
    print('LogLoss: %.2f, Accuracy: %.1f %% (Top-1), %.1f %% (Top-5)' % (
        mets[0], (1.0 - mets[1]) * 100, (1.0 - mets[2]) * 100))
    return
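For orientation: `TopKMisclassification(k=5)` reports three values per evaluation (log loss, top-1 misclassification, top-5 misclassification), which is why the script above converts the last two to accuracies with `1.0 - mets[i]`. Below is a minimal NumPy sketch of the top-k misclassification idea, illustrative only and not neon's implementation:

import numpy as np

def topk_misclass(scores, labels, k=5):
    """Fraction of samples whose true label is absent from the k top-scoring classes.

    scores: (N, C) class scores; labels: (N,) integer class ids.
    """
    topk = np.argsort(scores, axis=1)[:, -k:]     # indices of the k best classes per row
    hit = (topk == labels[:, None]).any(axis=1)   # is the true label among them?
    return 1.0 - hit.mean()

# toy check: 4 samples, 10 classes
rng = np.random.RandomState(0)
scores = rng.rand(4, 10)
labels = np.array([0, 1, 2, 3])
print(topk_misclass(scores, labels, k=5))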
Example #2
# NB: the original used type=bool, which is an argparse pitfall (bool('False') is True);
# a store_true flag gives the intended on/off behavior
parser.add_argument('--model_tree', action='store_true',
                    help='Whether or not to train tree of classifiers')
parser.add_argument('--freeze', type=int, help='Layers to freeze starting from end', default=0)
parser.add_argument('--dataset_dir', help='Directory containing images folder and label text files')
args = parser.parse_args()

# setup data provider
train_set_options = dict(repo_dir=args.data_dir,
                         inner_size=224,
                         dtype=args.datatype,
                         subset_pct=100)
test_set_options = dict(repo_dir=args.data_dir,
                        inner_size=224,
                        dtype=args.datatype,
                        subset_pct=20)

train = ImageLoader(set_name='train', **train_set_options)
test = ImageLoader(set_name='train', do_transforms=False, **test_set_options)

model, cost, opt = create_model(args.model_type, args.model_tree, args.freeze, args.dataset_dir,
                                args.model_file, train)

# configure callbacks
valmetric = TopKMisclassification(k=5)
valmetric.name = 'root_misclass'
# If freezing layers, load model in create_model
if args.freeze > 0:
    args.callback_args['model_file'] = None
callbacks = Callbacks(model, train, eval_set=test, metric=valmetric, **args.callback_args)
model.fit(train, optimizer=opt, num_epochs=args.epochs, cost=cost, callbacks=callbacks)
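Aside on the `--model_tree` fix above: with `type=bool`, argparse calls `bool()` on the raw string, and any non-empty string (including 'False') is truthy, so the option could never be switched off from the command line. A standalone demonstration of the pitfall and the `store_true` alternative:

import argparse

p = argparse.ArgumentParser()
p.add_argument('--bad', type=bool, default=False)   # bool('False') == True
p.add_argument('--good', action='store_true')       # present -> True, absent -> False

print(p.parse_args(['--bad', 'False']).bad)   # True, surprisingly
print(p.parse_args(['--good']).good)          # True
print(p.parse_args([]).good)                  # False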
Example #3
model, cost = create_network()
rseed = 0 if args.rng_seed is None else args.rng_seed

# setup data provider
assert 'train' in args.manifest, "Missing train manifest"
assert 'val' in args.manifest, "Missing validation manifest"
train = make_alexnet_train_loader(args.manifest['train'], args.manifest_root,
                                  model.be, args.subset_pct, rseed)
valid = make_validation_loader(args.manifest['val'], args.manifest_root,
                               model.be, args.subset_pct)

sched_weight = Schedule([10], change=0.1)
opt = GradientDescentMomentum(0.01, 0.9, wdecay=0.0005, schedule=sched_weight)

# configure callbacks
valmetric = TopKMisclassification(k=5)
callbacks = Callbacks(model,
                      eval_set=valid,
                      metric=valmetric,
                      **args.callback_args)

if args.deconv:
    callbacks.add_deconv_callback(train, valid)

model.fit(train,
          optimizer=opt,
          num_epochs=args.epochs,
          cost=cost,
          callbacks=callbacks)
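For reference, a neon step schedule like `Schedule([10], change=0.1)` multiplies the base learning rate by `change` at each listed epoch, so the 0.01 base rate above becomes 0.001 from epoch 10 onward. A pure-Python sketch of that behavior, an approximation for illustration rather than neon's implementation:

def stepped_lr(base_lr, step_epochs, change, epoch):
    # apply one multiplicative drop for every boundary already passed
    drops = sum(1 for e in step_epochs if epoch >= e)
    return base_lr * change ** drops

for epoch in (0, 9, 10, 25):
    print(epoch, stepped_lr(0.01, [10], 0.1, epoch))  # 0.01 until epoch 9, then 0.001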
Example #4
# method from a custom callback class: re-evaluate the validation set each epoch
def on_epoch_end(self, epoch):
    self.valid_set.reset()
    allmetrics = TopKMisclassification(k=5)
    stats = mlp.eval(self.valid_set, metric=allmetrics)
    print(", ".join(allmetrics.metric_names) + ": " +
          ", ".join(map(str, stats.flatten())))
def main():
    # parse the command line arguments
    parser = NeonArgparser(__doc__)

    args = parser.parse_args()

    logger = logging.getLogger()
    logger.setLevel(args.log_thresh)

    # Set up batch iterators for the training, validation and test images
    print("Setting up data batch loaders...")
    train = ImgMaster(repo_dir='dataTmp',
                      set_name='train',
                      inner_size=120,
                      subset_pct=100)
    val = ImgMaster(repo_dir='dataTmp',
                    set_name='train',
                    inner_size=120,
                    subset_pct=100,
                    do_transforms=False)
    test = ImgMaster(repo_dir='dataTestTmp',
                     set_name='train',
                     inner_size=120,
                     subset_pct=100,
                     do_transforms=False)

    train.init_batch_provider()
    val.init_batch_provider()
    test.init_batch_provider()

    print "Constructing network..."
    #Create AlexNet architecture
    model = constuct_network()

    #model.load_weights(args.model_file)

    # drop the weight LR by 1/250**(1/3) at epochs 22, 44, 65, 90 and 97;
    # drop the bias LR by 1/10 at epoch 130
    weight_sched = Schedule([22, 44, 65, 90, 97], (1 / 250.)**(1 / 3.))
    opt_gdm = GradientDescentMomentum(0.01,
                                      0.9,
                                      wdecay=0.005,
                                      schedule=weight_sched)
    opt_biases = GradientDescentMomentum(0.04,
                                         1.0,
                                         schedule=Schedule([130], .1))
    opt = MultiOptimizer({'default': opt_gdm, 'Bias': opt_biases})

    # configure callbacks
    valmetric = TopKMisclassification(k=5)
    callbacks = Callbacks(model,
                          train,
                          eval_set=val,
                          metric=valmetric,
                          **args.callback_args)

    cost = GeneralizedCost(costfunc=CrossEntropyMulti())

    #flag = input("Press Enter if you want to begin training process.")
    print "Training network..."
    model.fit(train,
              optimizer=opt,
              num_epochs=args.epochs,
              cost=cost,
              callbacks=callbacks)
    mets = model.eval(test, metric=valmetric)

    print('Test set metrics:')
    print('LogLoss: %.2f, Accuracy: %.1f %% (Top-1), %.1f %% (Top-5)' % (
        mets[0], (1.0 - mets[1]) * 100, (1.0 - mets[2]) * 100))
    test.exit_batch_provider()
    val.exit_batch_provider()
    train.exit_batch_provider()
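In Example #4 above, `MultiOptimizer` routes parameter updates by layer type: layers named `Bias` get `opt_biases`, and everything else falls back to the `'default'` entry. A minimal sketch of that dispatch rule (hypothetical helper, just to make the mapping concrete):

def select_optimizer(optimizer_map, layer_classname):
    # look the layer's class name up in the map, falling back to 'default'
    return optimizer_map.get(layer_classname, optimizer_map['default'])

opts = {'default': 'opt_gdm', 'Bias': 'opt_biases'}
print(select_optimizer(opts, 'Bias'))    # -> opt_biases
print(select_optimizer(opts, 'Linear'))  # -> opt_gdm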
Example #6
# set up 3x3 conv stacks with different feature map sizes
for nofm in [64, 128, 256, 512, 512]:
    layers.append(Conv((3, 3, nofm), **conv_params))
    layers.append(Conv((3, 3, nofm), **conv_params))
    if nofm > 128:
        layers.append(Conv((3, 3, nofm), **conv_params))
        if args.vgg_version == 'E':
            layers.append(Conv((3, 3, nofm), **conv_params))
    layers.append(Pooling(2, strides=2))

layers.append(Affine(nout=4096, init=initfc, bias=Constant(0), activation=relu))
layers.append(Dropout(keep=0.5))
layers.append(Affine(nout=4096, init=initfc, bias=Constant(0), activation=relu))
layers.append(Dropout(keep=0.5))
layers.append(Affine(nout=1000, init=initfc, bias=Constant(0), activation=Softmax()))

cost = GeneralizedCost(costfunc=CrossEntropyMulti())

model = Model(layers=layers)

# configure callbacks
top5 = TopKMisclassification(k=5)
callbacks = Callbacks(model, eval_set=test, metric=top5, **args.callback_args)

model.load_params(args.model_file)
mets = model.eval(test, metric=top5)
print('Test set metrics:')
print('LogLoss: %.2f, Accuracy: %.1f %% (Top-1), %.1f %% (Top-5)' % (mets[0],
                                                                     (1.0 - mets[1]) * 100,
                                                                     (1.0 - mets[2]) * 100))
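The conv-stack loop in Example #6 builds VGG's five blocks of 3x3 convolutions: two convolutions for the 64- and 128-map stages, three for the deeper stages (configuration D), or four when `--vgg_version` is 'E', each block ending in 2x2 max pooling. A quick sketch that checks the layer arithmetic (hypothetical helper):

def vgg_conv_count(version):
    total = 0
    for nofm in [64, 128, 256, 512, 512]:
        total += 2 if nofm <= 128 else (4 if version == 'E' else 3)
    return total

print(vgg_conv_count('D'))  # 13 convs + 3 Affine layers -> VGG-16
print(vgg_conv_count('E'))  # 16 convs + 3 Affine layers -> VGG-19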
Example #7
        layers.append(Conv((3, 3, nofm), **conv_params))
        layers.append(Conv((3, 3, nofm), **conv_params))
        layers.append(Pooling(3, strides=2))
    for nofm in [256, 512, 512]:
        layers.append(Conv((3, 3, nofm), **conv_params))
        layers.append(Conv((3, 3, nofm), **conv_params))
        layers.append(Conv((3, 3, nofm), **conv_params))
        layers.append(Pooling(3, strides=2))
else:
    raise ValueError("Invalid specification for VGG model")

layers.append(Affine(nout=4096, init=init1, batch_norm=True, activation=relu))
layers.append(Dropout(keep=0.5))
layers.append(Affine(nout=4096, init=init1, batch_norm=True, activation=relu))
layers.append(Dropout(keep=0.5))
layers.append(Affine(nout=1000, init=init1, bias=Constant(0), activation=Softmax()))

cost = GeneralizedCost(costfunc=CrossEntropyMulti(scale=cost_scale))

opt = MultiOptimizer({'default': opt_gdm, 'Bias': opt_biases})

mlp = Model(layers=layers)

# configure callbacks
callbacks = Callbacks(mlp, train, args, eval_set=test, metric=TopKMisclassification(k=5))

mlp.fit(train, optimizer=opt, num_epochs=args.epochs, cost=cost, callbacks=callbacks)

test.exit_batch_provider()
train.exit_batch_provider()
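Because `exit_batch_provider()` should run even if training fails partway, wrapping the fit call in try/finally is a safer variant of the teardown shown above (same API assumptions as the example):

try:
    mlp.fit(train, optimizer=opt, num_epochs=args.epochs,
            cost=cost, callbacks=callbacks)
finally:
    # release the batch providers even on error, in reverse order of creation
    test.exit_batch_provider()
    train.exit_batch_provider()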