"""Train a 5-layer dropout MLP on MNIST; print validation accuracy per epoch
and the final test-set error rate."""
import pickle

import numpy as np
import theano

from nnetsys.ff import Network, PerceptronLayer, MinibatchSGDTeacher, Validator

# MNIST pickle: (train, validation, test) splits, each a (features, labels) pair.
# NOTE(review): a Python-2-era pickle may need encoding='latin1' here -- confirm.
with open('../mnist.pkl', 'rb') as fh:
    data = pickle.load(fh)

# Wrap each split in Theano shared variables so minibatch slicing stays on-device.
data = [
    (
        theano.shared(np.asarray(features, dtype=theano.config.floatX), borrow=True),
        theano.shared(np.asarray(labels, dtype=np.int32), borrow=True),
    )
    for features, labels in data
]

nnet = Network(
    PerceptronLayer(28 * 28, 500, activation='relu', dropout=0.5),
    PerceptronLayer(500, 300, activation='relu', dropout=0.5),
    PerceptronLayer(300, 100, activation='relu', dropout=0.5),
    PerceptronLayer(100, 30, activation='relu', dropout=0.5),
    PerceptronLayer(30, 10, activation='softmax'),
)

teacher = MinibatchSGDTeacher(nnet, data[0], batch_size=100, learning_rate=0.1)
validator = Validator(nnet, data[1])
test_validator = Validator(nnet, data[2])

for epoch in range(100):  # range: Python-3 replacement for Python-2 xrange
    accuracy = validator.validate() * 100  # was misleadingly named `error`
    print('Accuracy (validation) is %s%%' % (accuracy,))
    # teacher.learning_rate = 1.0 / (epoch + 2)  # uncomment to decay the learning
    # rate (1.0 not 1: plain 1/(epoch+2) truncates to 0 under Python-2 division)
    teacher.train_epoch()

test_error = 100 - (test_validator.validate() * 100)
print('Model error rate is %s%%' % (test_error,))
"""Train a 3-layer tanh MLP on MNIST with L2 regularisation and a per-epoch
decaying learning rate; print validation accuracy every 10 epochs."""
import pickle

import numpy as np
import theano

from nnetsys.ff import Network, Perceptron, MinibatchSGDTeacher, Validator, Classifier

# NOTE(review): a Python-2-era pickle may need encoding='latin1' here -- confirm.
with open('mnist.pkl', 'rb') as fh:
    data = pickle.load(fh)

# Wrap each split in Theano shared variables so minibatch slicing stays on-device.
data = [
    (
        theano.shared(np.asarray(features, dtype=theano.config.floatX), borrow=True),
        theano.shared(np.asarray(labels, dtype=np.int32), borrow=True),
    )
    for features, labels in data
]

nnet = Network(
    Perceptron(28 * 28, 200, activation='tanh'),
    Perceptron(200, 30, activation='tanh'),
    Perceptron(30, 10, activation='softmax'),
)

teacher = MinibatchSGDTeacher(nnet, data[0], batch_size=20, learning_rate=0.5, l2=0.0001)
validator = Validator(nnet, data[1])
classifier = Classifier(nnet)  # built here for use once training finishes

for epoch in range(100):  # range: Python-3 replacement for Python-2 xrange
    if epoch % 10 == 0:
        accuracy = validator.validate() * 100  # was misleadingly named `error`
        print('Accuracy is %s%%' % (accuracy,))
    # BUG FIX: the original `1 / (i+2)` truncates to 0 under Python-2 integer
    # division, freezing the learning rate at 0 from the first epoch.
    teacher.learning_rate = 1.0 / (epoch + 2)
    teacher.train_epoch()