Example #1
    def test_xor_tanh(self):
        """Train a 2-2-1 tanh network on XOR and check the rounded outputs.

        ``test_set`` is a module-level XOR dataset (inputs shared variable
        at index 0) -- defined outside this view; assumes 4 rows in the
        XOR truth-table order (0,0), (0,1), (1,0), (1,1).
        """
        nnet = Network(
            Perceptron(2, 2, activation='tanh'),
            Perceptron(2, 1, activation='tanh')
        )

        teacher = MinibatchSGDTeacher(nnet, test_set, batch_size=1, learning_rate=0.6)
        e = Evaluator(nnet)

        # 200 epochs of per-sample SGD; enough for the tiny XOR problem.
        # range() replaces the Python-2-only xrange().
        for _ in range(200):
            teacher.train_epoch()

        # round() maps the continuous tanh outputs to hard 0/1 labels.
        # A list comprehension replaces map(): on Python 3, map() returns an
        # iterator, so comparing it against a list would always fail.
        predictions = [round(row[0]) for row in e.evaluate(test_set[0].get_value())]
        self.assertEqual(predictions, [0, 1, 1, 0])  # assertEquals is deprecated
Example #2
    def test_xor_relu(self):
        """Train a ReLU + softmax XOR classifier and check its class labels.

        ``test_set`` is a module-level XOR dataset (inputs shared variable
        at index 0) -- defined outside this view.
        """
        nnet = Network(
            Perceptron(2, 2, activation='relu'),
            Perceptron(2, 2, activation='softmax')
        )

        teacher = MinibatchSGDTeacher(nnet, test_set, batch_size=1, learning_rate=0.05)
        c = Classifier(nnet)
        # NOTE(review): the original also built an Evaluator here, but it was
        # never used (copy-paste leftover from test_xor_tanh) -- removed.

        # range() replaces the Python-2-only xrange().
        for _ in range(400):
            teacher.train_epoch()

        # Compare as equal-length lists instead of all(zip(...)): zip would
        # silently truncate if classify() returned too few predictions, and
        # assertEqual gives a readable diff on failure.
        predictions = [int(label) for label in c.classify(test_set[0].get_value())]
        self.assertEqual(predictions, [0, 1, 1, 0])
Example #3
# Load the pickled MNIST dataset: a (train, validation, test) triple of
# (inputs, labels) pairs.  `with` closes the file handle, which the original
# bare open() leaked.
# NOTE(review): if the pickle was written by Python 2 and this runs on
# Python 3, pickle.load needs encoding='latin1' -- confirm against the file.
with open('../mnist.pkl', 'rb') as mnist_file:
    data = pickle.load(mnist_file)

# Wrap each split in Theano shared variables (borrow=True avoids a copy);
# labels are int32 as Theano expects for integer indexing.
data = [(
    theano.shared(np.asarray(inputs, dtype=theano.config.floatX), borrow=True),
    theano.shared(np.asarray(labels, dtype=np.int32), borrow=True),
) for inputs, labels in data]


# Five-layer ReLU MLP with 0.5 dropout on every hidden layer and a softmax
# output over the 10 digit classes.
nnet = Network(
    PerceptronLayer(28 * 28, 500, activation='relu', dropout=0.5),
    PerceptronLayer(500, 300, activation='relu', dropout=0.5),
    PerceptronLayer(300, 100, activation='relu', dropout=0.5),
    PerceptronLayer(100, 30, activation='relu', dropout=0.5),
    PerceptronLayer(30, 10, activation='softmax'),
)

teacher = MinibatchSGDTeacher(nnet, data[0], batch_size=100, learning_rate=0.1)
validator = Validator(nnet, data[1])       # validation split
test_validator = Validator(nnet, data[2])  # held-out test split

# range() replaces the Python-2-only xrange().
for i in range(100):
    # validate() returns a fraction; renamed from `error` -- it is an
    # accuracy, as the original print string itself says.
    accuracy = validator.validate() * 100
    print('Accuracy (validation) is %s%%' % (accuracy,))
    # teacher.learning_rate = 1.0 / (i + 2)  # uncomment to decay the learning
    # rate (1.0, not 1: plain `1 / (i+2)` is integer division on Python 2)
    teacher.train_epoch()


# Final score on the untouched test split, reported as an error rate.
test_error = 100 - (test_validator.validate() * 100)
print('Model error rate is %s%%' % (test_error,))

Example #4
from nnetsys.ff import Network, Perceptron, MinibatchSGDTeacher, Validator, Classifier

# Load the pickled MNIST dataset: a (train, validation, test) triple of
# (inputs, labels) pairs.  `with` closes the file handle, which the original
# bare open() leaked.
# NOTE(review): pickle/theano/np are used but not imported in this snippet --
# presumably imported elsewhere; verify before running stand-alone.
with open('mnist.pkl', 'rb') as mnist_file:
    data = pickle.load(mnist_file)

# Wrap each split in Theano shared variables (borrow=True avoids a copy);
# labels are int32 as Theano expects for integer indexing.
data = [(
    theano.shared(np.asarray(inputs, dtype=theano.config.floatX), borrow=True),
    theano.shared(np.asarray(labels, dtype=np.int32), borrow=True),
) for inputs, labels in data]


# Three-layer tanh MLP with a softmax output over the 10 digit classes.
nnet = Network(
    Perceptron(28 * 28, 200, activation='tanh'),
    Perceptron(200, 30, activation='tanh'),
    Perceptron(30, 10, activation='softmax'),
)

# l2=0.0001 adds L2 weight decay to the SGD updates.
teacher = MinibatchSGDTeacher(nnet, data[0], batch_size=20, learning_rate=0.5, l2=0.0001)
validator = Validator(nnet, data[1])
classifier = Classifier(nnet)

# range() replaces the Python-2-only xrange().
for i in range(100):

    # Report validation accuracy every 10 epochs.  validate() returns a
    # fraction; renamed from `error` -- it is an accuracy, as the original
    # print string itself says.
    if (i % 10) == 0:
        accuracy = validator.validate() * 100
        print('Accuracy is %s%%' % (accuracy,))

    # Decay the learning rate as 1/(i+2).  BUG FIX: the original
    # `1 / (i+2)` is integer division under Python 2, which set the
    # learning rate to 0 from the very first epoch.
    teacher.learning_rate = 1.0 / (i + 2)
    teacher.train_epoch()