Example #1
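The source cuts this snippet off mid-definition, so its imports and the opening layers of the network are missing. The surviving layers (two 384-channel 3x3 convolutions feeding 4096-unit dense layers) match the classic AlexNet layout, so the reconstruction below follows it; the restored imports are inferred from the names used later in the snippet, and the exact opening layers are an assumption.

from mxnet import init
from mxnet.gluon import nn, loss, Trainer
import utils


def get_net():
    net = nn.Sequential()
    with net.name_scope():
        net.add(
            # Reconstructed opening layers (assumption): a standard AlexNet
            # stem; the original snippet resumes at the first 384-channel
            # convolution below.
            nn.Conv2D(channels=96, kernel_size=11, strides=4,
                      activation='relu'),
            nn.MaxPool2D(pool_size=3, strides=2),
            nn.Conv2D(channels=256, kernel_size=5,
                      padding=2, activation='relu'),
            nn.MaxPool2D(pool_size=3, strides=2),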
            nn.Conv2D(channels=384, kernel_size=3,
                      padding=1, activation='relu'),
            nn.Conv2D(channels=384, kernel_size=3,
                      padding=1, activation='relu'),
            nn.MaxPool2D(pool_size=3, strides=2),
            nn.Flatten(),
            nn.Dense(4096, activation='relu'),
            nn.Dropout(.5),
            nn.Dense(4096, activation='relu'),
            nn.Dropout(.5),
            nn.Dense(10)
        )
    return net

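# AlexNet-style nets expect 224x224 inputs, so the 28x28 MNIST images
# are resized at load time.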
batch_size = 64
train_iter, test_iter = utils.loadMnistData(batch_size, resize=224)
# train_iter, test_iter = utils.load_data_fashion_mnist(batch_size, resize=224)

# for data, label in train_iter:
#     print(data.shape)
#     break
net = get_net()
ctx = utils.getCtx()

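# Xavier initialization sizes each layer's random weights to keep
# activation variance roughly constant across layers.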
net.initialize(ctx=ctx, init=init.Xavier())

softmax_loss = loss.SoftmaxCrossEntropyLoss()

epochs = 5

trainer = Trainer(net.collect_params(), 'sgd', {'learning_rate': 0.5})
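
The snippet stops right after the trainer is created. A minimal training loop in the style of the later examples might look like the sketch below; it assumes `utils.loadMnistData` yields `(data, label)` batches that must be copied onto `ctx` before the forward pass.

from mxnet import autograd
from mxnet import ndarray as nd

for epoch in range(epochs):
    train_loss, n_batches = 0., 0
    for data, label in train_iter:
        # Copy the batch to the device the parameters were initialized on.
        data = data.as_in_context(ctx)
        label = label.as_in_context(ctx)
        with autograd.record():
            output = net(data)
            l = softmax_loss(output, label)
        l.backward()
        trainer.step(batch_size)
        train_loss += nd.mean(l).asscalar()
        n_batches += 1
    print('epoch %d, loss %.4f' % (epoch, train_loss / n_batches))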
Example #2
from mxnet import gluon
from mxnet import autograd
from mxnet import ndarray as nd
from utils import loadMnistData, accuracy, evaluate_accuracy

net = gluon.nn.Sequential()
with net.name_scope():
    net.add(gluon.nn.Flatten())
    net.add(gluon.nn.Dense(256, activation="relu"))
    net.add(gluon.nn.Dense(10))
net.initialize()

batch_size = 256
train_data, test_data = loadMnistData(batch_size)

softmax_cross_entropy = gluon.loss.SoftmaxCrossEntropyLoss()
trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate': 0.5})

for epoch in range(5):
    train_loss = 0.
    train_acc = 0.
    for data, label in train_data:
        with autograd.record():
            output = net(data)
            loss = softmax_cross_entropy(output, label)
        loss.backward()
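        # step(batch_size) normalizes the accumulated gradients by the
        # batch size before applying the SGD update.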
        trainer.step(batch_size)

        train_loss += nd.mean(loss).asscalar()
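        # NOTE: the source snippet is truncated here; the lines below are a
        # hedged completion using the accuracy helpers imported above (their
        # argument order and len() support on the loader are assumed).
        train_acc += accuracy(output, label)
    test_acc = evaluate_accuracy(test_data, net)
    print("Epoch %d. Loss: %f, Train acc %f, Test acc %f" % (
        epoch, train_loss / len(train_data),
        train_acc / len(train_data), test_acc))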
Example #3
from mxnet import autograd, gluon
from mxnet import ndarray as nd
from utils import accuracy, evaluate_accuracy, loadMnistData

batch_size = 256

num_hidden = 256

net = gluon.nn.Sequential()

with net.name_scope():
    net.add(gluon.nn.Flatten())
    net.add(gluon.nn.Dense(num_hidden, activation="relu"))
    net.add(gluon.nn.Dense(10))

net.initialize()
train_iter, test_iter = loadMnistData(batch_size)
trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate': 0.5})

softmax_cross_entropy = gluon.loss.SoftmaxCrossEntropyLoss()

epochs = 5
for epoch in range(epochs):
    total_loss = 0.0
    total_acc = 0.0
    for data, label in train_iter:
        with autograd.record():
            output = net(data)
            loss = softmax_cross_entropy(output, label)
        loss.backward()
        trainer.step(batch_size)
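        # NOTE: truncated in the source; a hedged completion mirroring the
        # previous example (helper argument order and len() support assumed).
        total_loss += nd.mean(loss).asscalar()
        total_acc += accuracy(output, label)
    test_acc = evaluate_accuracy(test_iter, net)
    print("Epoch %d. Loss: %f, Train acc %f, Test acc %f" % (
        epoch, total_loss / len(train_iter),
        total_acc / len(train_iter), test_acc))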