Example #1
import numpy as np
import os
from PIL import Image
from network import NN
from op import Fullyconnected, Sigmoid, SimpleBatchNorm, Relu

img_dir = "./imgs/"

model_path = './model/mnist_mlp_epoch9.model'

# Construct nn
MLP = NN()
MLP.add(SimpleBatchNorm(name="data_batchnorm", istraining=False))
MLP.add(Fullyconnected(784, 512, name="fc1"))
MLP.add(Relu(name="fc1_relu"))
MLP.add(SimpleBatchNorm(name='fc1_batchnorm', istraining=False))  # keep batch norm in inference mode, like data_batchnorm
MLP.add(Fullyconnected(512, 512, name="fc2"))
MLP.add(Relu(name="fc2_relu"))
MLP.add(Fullyconnected(512, 10, name="fc3"))

# Load model
MLP.load_model(model_path)

for parent, dirnames, filenames in os.walk(img_dir):
    for filename in filenames:
        if filename.endswith((".jpg", ".png")):
            img_path = os.path.join(parent, filename)
            pil_img = Image.open(img_path).convert('L')
            pil_img = pil_img.resize((28, 28), Image.LANCZOS)  # Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the equivalent filter
            img = np.asarray(pil_img, dtype=np.float32)  # cast to float so the input batch norm works on real values
            out_data = MLP.forward(img.reshape((1, 784)))  # one forward pass; fc3 yields one score per digit class
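
The example stops at the forward pass. Since fc3 has 10 outputs, the result should be one score per digit class; the snippet below is a minimal, self-contained sketch of turning such a (1, 10) score array into a predicted digit. The random scores are only a stand-in for the actual MLP.forward output and are not part of op.py or network.py.

import numpy as np

scores = np.random.randn(1, 10)           # stand-in for out_data = MLP.forward(...)
pred = int(np.argmax(scores, axis=1)[0])  # predicted digit = index of the largest score
print("predicted digit:", pred)
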
Example #2
from loader import mnist
from network import NN
from op import Fullyconnected, Sigmoid, SimpleBatchNorm, Relu, Dropout
import numpy as np

# Set hyper-params here
batch_size = 20
learning_rate = 0.01
learning_step = [6]      # epoch(s) at which the learning rate is presumably stepped down
weight_decay = 0.        # L2 regularization strength (disabled here)
total_epoch = 12
model_path = './model/'  # directory where checkpoints are saved

# Construct nn
MLP = NN(learning_rate=learning_rate)
MLP.add(SimpleBatchNorm(name='data_batchnorm', istraining=True))
MLP.add(Dropout(ratio=0.3, name='data_dropout'))
MLP.add(Fullyconnected(784, 512, name="fc1"))
MLP.add(SimpleBatchNorm(name='fc1_batchnorm', istraining=True))  # training mode, like data_batchnorm
MLP.add(Relu(name="fc1_relu"))
MLP.add(Fullyconnected(512, 512, name="fc2"))
MLP.add(Relu(name="fc2_relu"))
MLP.add(Fullyconnected(512, 10, name="fc3"))
MLP.set_wd(weight_decay)
MLP.set_lr(learning_rate)

# Load mnist data
mnist = mnist(path="./data/", batch_size=batch_size)  # note: rebinds the imported name to the loader instance
num_imgs = mnist.get_num()

epoch = 0
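
Example #2 wires a Dropout layer with ratio=0.3 into the network. The implementation in op.py is not shown here; below is a minimal, self-contained NumPy sketch of standard inverted dropout, which is what such a layer usually computes: at training time each activation is zeroed with probability ratio and the survivors are rescaled, while at inference the layer is the identity.

import numpy as np

def inverted_dropout(x, ratio=0.3, training=True):
    # Inference: pass activations through unchanged.
    if not training:
        return x
    keep = 1.0 - ratio
    # Keep each unit with probability (1 - ratio), drop the rest.
    mask = (np.random.rand(*x.shape) < keep).astype(x.dtype)
    # Rescale the survivors so the expected activation matches the no-dropout case.
    return x * mask / keep

x = np.ones((2, 4), dtype=np.float32)
print(inverted_dropout(x, ratio=0.3, training=True))
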