Example #1
import numpy
import chainer
import chainer.functions as F
from chainer import optimizers as Opt
# Classifier: the project's thin training wrapper around a FunctionSet

# multi-layer perceptron: 2 inputs -> 100 -> 100 -> 2 output classes
model = chainer.FunctionSet(l1=F.Linear(2, 100),
                            l2=F.Linear(100, 100),
                            l3=F.Linear(100, 2))


# define the forward pass; it is bound to the Classifier via set_forward,
# so self.model refers to the FunctionSet above
def forward(self, x, train):
    h = F.relu(self.model.l1(x))
    h = F.relu(self.model.l2(h))
    y = self.model.l3(h)

    return y


mlp = Classifier(model, gpu=-1)
mlp.set_forward(forward)
mlp.set_optimizer(Opt.AdaDelta, {'rho': 0.9})

# toy 2-D dataset: points whose coordinates share a sign (quadrants I/III)
# get label 0, the other two quadrants get label 1 (an XOR-like problem)
arr = []
t = []
for i in range(10000):
    x, y = (numpy.random.rand() - 0.5), (numpy.random.rand() - 0.5)
    arr.append(numpy.array([x, y]))
    if (x < 0. and y < 0.) or (x > 0. and y > 0.):
        t.append(0)
    else:
        t.append(1)

# the original call was truncated here; the arguments below are a plausible completion
print(mlp.train(numpy.array(arr, dtype=numpy.float32),
                numpy.array(t, dtype=numpy.int32),
                batchsize=100))
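For reference, here is a minimal sketch of a single training pass over this toy dataset written against the plain Chainer 1.x API (FunctionSet, optimizers, Variable), roughly what the Classifier wrapper is presumed to do internally; the batch size and the softmax cross-entropy loss are assumptions, not taken from the wrapper's actual code.

# minimal sketch of a plain-Chainer training pass over the toy data above,
# approximating what the Classifier wrapper presumably does internally;
# the batch size and loss function are assumptions
x_all = numpy.array(arr, dtype=numpy.float32)
t_all = numpy.array(t, dtype=numpy.int32)

optimizer = Opt.AdaDelta(rho=0.9)
optimizer.setup(model.collect_parameters())

batchsize = 100
for start in range(0, len(x_all), batchsize):
    x_batch = chainer.Variable(x_all[start:start + batchsize])
    t_batch = chainer.Variable(t_all[start:start + batchsize])

    # same forward pass as above, written out inline
    h = F.relu(model.l1(x_batch))
    h = F.relu(model.l2(h))
    y = model.l3(h)

    loss = F.softmax_cross_entropy(y, t_batch)
    acc = F.accuracy(y, t_batch)

    optimizer.zero_grads()
    loss.backward()
    optimizer.update()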
Example #2
import numpy
import chainer
import chainer.functions as F
from sklearn.datasets import fetch_mldata
# Classifier: the same project-specific training wrapper as in Example #1

# convolutional network for MNIST; the head of this FunctionSet was truncated
# in the original listing, so the conv1/conv2/bn layer sizes below are
# assumptions chosen to make fl4's 2304-unit input (64 * 6 * 6) work out
model = chainer.FunctionSet(conv1=F.Convolution2D(1, 20, 5),
                            bn1=F.BatchNormalization(20),
                            conv2=F.Convolution2D(20, 30, 3, pad=1),
                            bn2=F.BatchNormalization(30),
                            conv3=F.Convolution2D(30, 64, 3, pad=1),
                            fl4=F.Linear(2304, 576),
                            fl5=F.Linear(576, 10))


def forward(self, x, train):
    # two conv/pool stages reduce the 1x28x28 input to 64 feature maps of
    # size 6x6, i.e. the 2304 features expected by fl4
    h = F.max_pooling_2d(F.relu(self.model.bn1(self.model.conv1(x))), 2)
    h = F.relu(self.model.bn2(self.model.conv2(h)))
    h = F.max_pooling_2d(F.relu(self.model.conv3(h)), 2)
    # pass the train flag through so dropout is only active during training
    h = F.dropout(F.relu(self.model.fl4(h)), train=train)
    y = self.model.fl5(h)

    return y


cnn = Classifier(model, gpu=-1)
cnn.set_forward(forward)
# note: no set_optimizer call here, so the wrapper presumably falls back to its default optimizer

# load MNIST, shuffle it, and split into 60,000 training / 10,000 test samples
mnist = fetch_mldata('MNIST original', data_home='.')
perm = numpy.random.permutation(len(mnist.data))
mnist.data = mnist.data.astype(numpy.float32).reshape(70000, 1, 28, 28) / 255
mnist.target = mnist.target.astype(numpy.int32)
train_data = mnist.data[perm][:60000]
train_label = mnist.target[perm][:60000]
test_data = mnist.data[perm][60000:]
test_label = mnist.target[perm][60000:]

for epoch in range(15):
    print('epoch : %d' % (epoch + 1))
    err, acc = cnn.train(train_data, train_label, batchsize=200)
    print('train accuracy : %f, train loss : %f' % (acc, err))
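The loop above only reports training-set metrics. Below is a hedged sketch of scoring the held-out 10,000 test images by calling the forward pass directly; it assumes the Classifier wrapper exposes the FunctionSet as cnn.model (as Example #1's forward pass suggests) and offers no evaluation method of its own.

# sketch: evaluate on the held-out test split in mini-batches;
# assumes cnn.model is the FunctionSet, as in Example #1's forward pass
correct = 0
batchsize = 200
for start in range(0, len(test_data), batchsize):
    x = chainer.Variable(test_data[start:start + batchsize])
    y = forward(cnn, x, train=False)  # dropout disabled at test time
    pred = y.data.argmax(axis=1)
    correct += int((pred == test_label[start:start + batchsize]).sum())

print('test accuracy : %f' % (correct / float(len(test_data))))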