Example #1
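    # Note: the source truncates the start of this example; the Net()
    # constructor and the first Conv2d layer (which produce the 6x24x24
    # activation referenced in the comments below) are not shown.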
    net.push(Relu())
    net.push(Maxpooling(2, 2))  # 6x24x24 -> 6x12x12
    net.push(Conv2d(5, 5, 6, 16))  # 6x12x12 -> 16x8x8
    net.push(Relu())
    net.push(Maxpooling(2, 2))  # 16x8x8 -> 16x4x4
    net.push(Reshape((256)))
    net.push(Linear(256, 84))
    net.push(Relu())
    net.push(Softmax(84, 10))
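    # Shape check (added note, assuming "valid" convolutions and stride-2
    # pooling as the comments above indicate): each 5x5 conv shrinks the
    # spatial size by 4 (out = in - 5 + 1) and each 2x2 max-pool halves it,
    # so 24 -> 12 -> 8 -> 4, and the flattened vector is 16 * 4 * 4 = 256,
    # matching Reshape and Linear(256, 84).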

    # Data
    data = DataProvider()
    n = 10000
    data.train_input(x[:n], y[:n])
    data.test_input(xt, yt)
    data.batch_size(16)
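    # With n = 10000 training samples and a batch size of 16, one epoch is
    # 10000 / 16 = 625 mini-batches (assuming batch_run() returns the number
    # of batches per epoch, which the loop below suggests but does not show).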

    lr = 0.0009
    gamma = 0.9
    for epoch in xrange(50):
        print 'Epoch: ', epoch

        # Training (Mini-batch)
        now = time.time()
        for _ in xrange(data.batch_run()):
            net.input(data.next_batch())
            net.forward()
            net.backward(lr)
        t = time.time() - now
        acc, loss = net.get_record()
        print 'Acc:    ', np.array(acc).mean()
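    # Note (not part of the original loop): gamma = 0.9 is declared above but
    # never used in the visible portion of this example; a plausible reading,
    # and only an assumption here, is a per-epoch learning-rate decay applied
    # at the end of each epoch:
    #     lr *= gamma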
Example #2
    net = Net()
    net.push(Conv2d(5, 5, 3, 20))  # 3x32 -> 20x28
    net.push(Relu())
    net.push(BatchNorm())
    net.push(Maxpooling(4, 4))  # 20x28 -> 20x7
    net.push(Reshape((980)))
    net.push(Linear(980, 200))
    net.push(Relu())
    net.push(BatchNorm())
    net.push(Softmax(200, 10))
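    # Flatten check (added note): with 20 feature maps of size 7x7 after the
    # 4x4 max-pool, the flattened vector is 20 * 7 * 7 = 980, matching
    # Reshape and Linear(980, 200).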

    # Data
    data = DataProvider()
    data.train_input(x, y)
    data.test_input(xt, yt)
    data.batch_size(32)
    data.batch_size_test(1000)

    lr = 1e-3
    gamma = 1
    beta_1 = 0.9
    beta_2 = 0.999
    total_epoch = 100
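    # Note (added, not in the original): beta_1 and beta_2 are the standard
    # Adam decay rates for the first and second moment estimates. How they are
    # consumed is outside the visible snippet; the usual per-parameter update
    # they drive is
    #     m = beta_1 * m + (1 - beta_1) * g
    #     v = beta_2 * v + (1 - beta_2) * g**2
    #     w -= lr * m_hat / (sqrt(v_hat) + eps)  # m_hat, v_hat bias-corrected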

    loss_cache = 10
    for epoch in xrange(1, total_epoch + 1):
        print 'Epoch: {}/{}'.format(epoch, total_epoch)

        # Training (Mini-batch)
        now = time.time()
        data.shuffle()