Example #1
import common
import mxnet as mx
import logging

def test_inception(devs, kv_type):
    # guarantee the same weight init for each run
    mx.random.seed(0)
    logging.basicConfig(level=logging.DEBUG)

    (train, val) = common.cifar10(batch_size=128, input_shape=(3, 28, 28))

    model = mx.model.FeedForward.create(ctx=devs,
                                        symbol=common.inception(),
                                        X=train,
                                        eval_data=val,
                                        kvstore=kv_type,
                                        num_epoch=10,
                                        learning_rate=0.1,
                                        momentum=0.9,
                                        wd=0.00001,
                                        initializer=mx.init.Uniform(0.07))

    return common.accuracy(model, val)
Example #2
import common
import mxnet as mx
import logging

def test_inception(devs, kv_type):
    # guarantee the same weight init for each run
    mx.random.seed(0)
    logging.basicConfig(level=logging.DEBUG)

    (train, val) = common.cifar10(batch_size=128, input_shape=(3, 28, 28))

    model = mx.model.FeedForward.create(
        ctx           = devs,
        symbol        = common.inception(),
        X             = train,
        eval_data     = val,
        kvstore       = kv_type,
        num_epoch     = 10,
        learning_rate = 0.1,
        momentum      = 0.9,
        wd            = 0.00001,
        initializer   = mx.init.Uniform(0.07))

    return common.accuracy(model, val)
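Both versions of the test leave the device list and the kvstore type to the caller. The driver below is a minimal sketch of how they might be invoked; the two-GPU context list and the 'device' kvstore are illustrative assumptions, not part of the original file.

import mxnet as mx

# Hypothetical driver (assumed, not from the original file): run the test on
# two GPUs with a single-machine kvstore. 'local' aggregates gradients on the
# CPU, 'device' aggregates them on the GPUs; 'dist_sync' or 'dist_async' would
# additionally require a running parameter-server job.
devs = [mx.gpu(i) for i in range(2)]
acc = test_inception(devs, 'device')
print('validation accuracy: %f' % acc)

The standalone script below trains the same Inception model, but creates a 'dist_async' kvstore and partitions CIFAR-10 across the workers of a distributed job.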
#!/usr/bin/env python
# pylint: skip-file
import common
import mxnet as mx
import logging

mx.random.seed(0)
logging.basicConfig(level=logging.DEBUG)
# asynchronous parameter server: workers push gradients without waiting for each other
kv = mx.kvstore.create('dist_async')

# each worker reads its own 1/num_workers partition of CIFAR-10
(train, val) = common.cifar10(num_parts=kv.num_workers,
                              part_index=kv.rank,
                              batch_size=128,
                              input_shape=(3, 28, 28))

# data-parallel training on two GPUs per worker
devs = [mx.gpu(i) for i in range(2)]
model = mx.model.FeedForward.create(
    ctx           = devs,
    kvstore       = kv,
    symbol        = common.inception(),
    X             = train,
    eval_data     = val,
    num_epoch     = 20,
    learning_rate = 0.05,
    momentum      = 0.9,
    wd            = 0.00001,
    initializer   = mx.init.Uniform(0.07))

common.accuracy(model, val)
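The script relies on the kvstore for the worker topology that drives the data partitioning above. A minimal sketch, assuming the process was started as a worker of a distributed job (e.g. by MXNet's tools/launch.py, which sets the DMLC_* environment variables that 'dist_async' reads):

import mxnet as mx

kv = mx.kvstore.create('dist_async')
# rank identifies this worker and num_workers is the total worker count, so
# each worker trains on 1/num_workers of CIFAR-10; with batch_size=128 per
# worker the effective global batch size is 128 * kv.num_workers.
print('worker %d of %d' % (kv.rank, kv.num_workers))

Run outside such a job, the distributed kvstore has nothing to connect to, so the script is meant to be launched once per worker by the job launcher rather than executed standalone.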