Example 1

# Imports used below; txtf is the protobuf text-format parser that
# reads the data reader prototext.
import lbann
import lbann.lbann_pb2
import lbann.contrib.args
import lbann.proto
import google.protobuf.text_format as txtf

def set_up_experiment(args,
                      input_,
                      probs,
                      labels):
    # Set up objective function
    cross_entropy = lbann.CrossEntropy([probs, labels])
    layers = list(lbann.traverse_layer_graph(input_))
    weights = set()
    for l in layers:
        weights.update(l.weights)
    # scale = weight decay
    l2_reg = lbann.L2WeightRegularization(weights=weights, scale=1e-4)
    objective_function = lbann.ObjectiveFunction([cross_entropy, l2_reg])

    # Set up model
    top1 = lbann.CategoricalAccuracy([probs, labels])
    top5 = lbann.TopKCategoricalAccuracy([probs, labels], k=5)
    metrics = [lbann.Metric(top1, name='top-1 accuracy', unit='%'),
               lbann.Metric(top5, name='top-5 accuracy', unit='%')]
    callbacks = [lbann.CallbackPrint(),
                 lbann.CallbackTimer(),
                 lbann.CallbackDropFixedLearningRate(
                     drop_epoch=[30, 60], amt=0.1)]
    model = lbann.Model(args.mini_batch_size,
                        args.num_epochs,
                        layers=layers,
                        weights=weights,
                        objective_function=objective_function,
                        metrics=metrics,
                        callbacks=callbacks)

    # Load data reader from prototext
    data_reader_proto = lbann.lbann_pb2.LbannPB()
    with open(args.data_reader, 'r') as f:
        txtf.Merge(f.read(), data_reader_proto)
    data_reader_proto = data_reader_proto.data_reader

    # Set up optimizer
    if args.optimizer == 'sgd':
        print('Creating sgd optimizer')
        optimizer = lbann.optimizer.SGD(
            learn_rate=args.optimizer_learning_rate,
            momentum=0.9,
            nesterov=True
        )
    else:
        optimizer = lbann.contrib.args.create_optimizer(args)

    # Save prototext to args.prototext
    if args.prototext:
        lbann.proto.save_prototext(args.prototext,
                                   model=model,
                                   optimizer=optimizer,
                                   data_reader=data_reader_proto)

    return model, data_reader_proto, optimizer
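
Example 1 reads only a handful of attributes off `args`, so a minimal command-line driver is enough to exercise it. The sketch below builds a compatible namespace with argparse; `add_optimizer_arguments` is assumed to be the `lbann.contrib.args` helper that pairs with `create_optimizer` above and registers `--optimizer` and `--optimizer-learning-rate`, while `input_`, `probs`, and `labels` come from a previously built layer graph such as the one in Example 3.

import argparse
import lbann.contrib.args

parser = argparse.ArgumentParser()
parser.add_argument('--mini-batch-size', type=int, default=256)
parser.add_argument('--num-epochs', type=int, default=90)
parser.add_argument('--data-reader', help='path to a data reader prototext')
parser.add_argument('--prototext', default=None,
                    help='if given, save the experiment prototext to this path')
# Assumption: this helper registers --optimizer and
# --optimizer-learning-rate, matching create_optimizer() above.
lbann.contrib.args.add_optimizer_arguments(parser)
args = parser.parse_args()

# input_, probs, labels: outputs of a layer graph built beforehand
model, data_reader_proto, optimizer = set_up_experiment(
    args, input_, probs, labels)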
Example 2

# Imports used below; data.imagenet is assumed to be the
# application-local module that provides make_data_reader().
import lbann
import lbann.contrib.args
import data.imagenet

def set_up_experiment(args, input_, probs, labels):
    # Set up objective function
    cross_entropy = lbann.CrossEntropy([probs, labels])
    layers = list(lbann.traverse_layer_graph(input_))
    l2_reg_weights = set()
    for l in layers:
        if type(l) == lbann.Convolution or type(l) == lbann.FullyConnected:
            l2_reg_weights.update(l.weights)
    # scale = weight decay
    l2_reg = lbann.L2WeightRegularization(weights=l2_reg_weights, scale=1e-4)
    objective_function = lbann.ObjectiveFunction([cross_entropy, l2_reg])

    # Set up model
    top1 = lbann.CategoricalAccuracy([probs, labels])
    top5 = lbann.TopKCategoricalAccuracy([probs, labels], k=5)
    metrics = [
        lbann.Metric(top1, name='top-1 accuracy', unit='%'),
        lbann.Metric(top5, name='top-5 accuracy', unit='%')
    ]
    callbacks = [
        lbann.CallbackPrint(),
        lbann.CallbackTimer(),
        lbann.CallbackDropFixedLearningRate(drop_epoch=[30, 60], amt=0.1)
    ]
    model = lbann.Model(args.num_epochs,
                        layers=layers,
                        objective_function=objective_function,
                        metrics=metrics,
                        callbacks=callbacks)

    # Set up data reader
    data_reader = data.imagenet.make_data_reader(num_classes=args.num_classes)

    # Set up optimizer
    if args.optimizer == 'sgd':
        print('Creating sgd optimizer')
        optimizer = lbann.optimizer.SGD(
            learn_rate=args.optimizer_learning_rate,
            momentum=0.9,
            nesterov=True)
    else:
        optimizer = lbann.contrib.args.create_optimizer(args)

    # Set up trainer
    trainer = lbann.Trainer(mini_batch_size=args.mini_batch_size)

    return trainer, model, data_reader, optimizer
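
Example 2's return values line up with the argument order of LBANN's Python launcher. A minimal sketch of the final step, assuming the `lbann.run` entry point of the LBANN Python frontend (the node and process counts are illustrative, not values from the original script):

# Launch the experiment with the objects assembled above.
trainer, model, data_reader, optimizer = set_up_experiment(
    args, input_, probs, labels)
lbann.run(trainer, model, data_reader, optimizer,
          nodes=1, procs_per_node=1)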
Example 3

else:
    # Some other Wide ResNet.
    resnet = resnet_variant_dict[args.resnet](
        imagenet_labels,
        bn_statistics_group_size=args.bn_statistics_group_size,
        width=args.width)

# Construct layer graph
input_ = lbann.Input(target_mode='classification')
images = lbann.Identity(input_)
labels = lbann.Identity(input_)
preds = resnet(images)
probs = lbann.Softmax(preds)
cross_entropy = lbann.CrossEntropy(probs, labels)
top1 = lbann.CategoricalAccuracy(probs, labels)
top5 = lbann.TopKCategoricalAccuracy(probs, labels, k=5)
layers = list(lbann.traverse_layer_graph(input_))

# Set up tensor core operations (just to demonstrate enum usage)
tensor_ops_mode = lbann.ConvTensorOpsMode.NO_TENSOR_OPS
for l in layers:
    if type(l) == lbann.Convolution:
        l.conv_tensor_op_mode = tensor_ops_mode

# Set up objective function
l2_reg_weights = set()
for l in layers:
    if type(l) == lbann.Convolution or type(l) == lbann.FullyConnected:
        l2_reg_weights.update(l.weights)
l2_reg = lbann.L2WeightRegularization(weights=l2_reg_weights, scale=1e-4)
obj = lbann.ObjectiveFunction([cross_entropy, l2_reg])
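
Example 3 stops after the objective function. Following the pattern of Examples 1 and 2, the remaining setup (metrics, callbacks, model, trainer) would plug in roughly as in this sketch, which mirrors those two examples rather than reproducing the original script:

# Sketch: complete the experiment the same way Examples 1 and 2 do.
metrics = [lbann.Metric(top1, name='top-1 accuracy', unit='%'),
           lbann.Metric(top5, name='top-5 accuracy', unit='%')]
callbacks = [lbann.CallbackPrint(),
             lbann.CallbackTimer(),
             lbann.CallbackDropFixedLearningRate(drop_epoch=[30, 60], amt=0.1)]
model = lbann.Model(args.num_epochs,
                    layers=layers,
                    objective_function=obj,
                    metrics=metrics,
                    callbacks=callbacks)
trainer = lbann.Trainer(mini_batch_size=args.mini_batch_size)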