Ejemplo n.º 1
0
def test_4():
    """Two conv-pool layers + fully-connected layer + softmax.

    Activation: ReLU; cost uses L2 regularization (lmbda=0.1).
    Reported test accuracy: 99.18%.
    """
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    training_data, validation_data, test_data = network3.load_data_shared()
    mini_batch_size = 10

    # Build the layer stack as named locals for readability.
    conv1 = ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU)
    conv2 = ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU)
    hidden = FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100,
                                 activation_fn=ReLU)
    output = SoftmaxLayer(n_in=100, n_out=10)

    net = Network([conv1, conv2, hidden, output], mini_batch_size)
    net.SGD(training_data, 60, mini_batch_size, 0.03,
            validation_data, test_data, lmbda=0.1)
Ejemplo n.º 2
0
def elu():
    """Train RUNS networks with ELU activations; return the last one.

    Architecture: two conv-pool layers followed by three fully-connected
    ELU layers and a 2-way softmax, with L2 regularization 0.0001.
    """
    net = None
    for j in range(RUNS):
        # Fixed: converted the Python 2 print statement to a function call
        # (the rest of the file uses print()) and corrected the label —
        # the activation used throughout is ELU, not leaky ReLU.
        print("num %s, elu, with regularization %s" % (j, 0.0001))
        net = Network([
            ConvPoolLayer(image_shape=(MB_SIZE, 1, IMAGE_SIZE, IMAGE_SIZE),
                          filter_shape=(5, 1, 12, 12),
                          poolsize=(3, 3),
                          activation_fn=ELU),
            ConvPoolLayer(image_shape=(MB_SIZE, 5, 30, 30),
                          filter_shape=(10, 5, 3, 3),
                          poolsize=(2, 2),
                          activation_fn=ELU),
            FullyConnectedLayer(
                n_in=10 * 14 * 14, n_out=200, activation_fn=ELU),
            FullyConnectedLayer(n_in=200, n_out=200, activation_fn=ELU),
            FullyConnectedLayer(n_in=200, n_out=100, activation_fn=ELU),
            SoftmaxLayer(n_in=100, n_out=2)
        ], MB_SIZE)
        net.SGD("ELU",
                training_data,
                EPOCHS,
                MB_SIZE,
                ETA,
                validation_data,
                test_data,
                lmbda=0.0001)
    return net
Ejemplo n.º 3
0
def dbl_conv_relu():
    """Grid-search L2 regularization strengths for a Conv+Conv+FC ReLU net.

    Trains three runs for each lmbda value; results are printed by SGD.
    """
    for lmbda in [0.0, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1.0, 10.0, 100.0]:
        for j in range(3):
            # Fixed: the % operator was applied to print()'s return value
            # (None), which raises TypeError in Python 3; format the
            # string before printing instead.
            print("Conv + Conv + FC num %s, relu, with regularization %s"
                  % (j, lmbda))
            net = Network([
                ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                              filter_shape=(20, 1, 5, 5),
                              poolsize=(2, 2),
                              activation_fn=ReLU),
                ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                              filter_shape=(40, 20, 5, 5),
                              poolsize=(2, 2),
                              activation_fn=ReLU),
                FullyConnectedLayer(
                    n_in=40 * 4 * 4, n_out=100, activation_fn=ReLU),
                SoftmaxLayer(n_in=100, n_out=10)
            ], mini_batch_size)
            net.SGD(training_data,
                    60,
                    mini_batch_size,
                    0.03,
                    validation_data,
                    test_data,
                    lmbda=lmbda)
Ejemplo n.º 4
0
def expanded_data_double_fc(n=100):
    """n is the number of neurons in both fully-connected layers.  We'll
    try n=100, 300, and 1000.

    Fixed: the % operator was applied to print()'s return value (None),
    which raises TypeError in Python 3; format the string instead.
    """
    expanded_training_data, _, _ = network3.load_data_shared(
        "../data/mnist_expanded.pkl.gz")
    for j in range(3):
        print(
            "Training with expanded data, %s neurons in two FC layers, run num %s"
            % (n, j))
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            FullyConnectedLayer(n_in=40 * 4 * 4, n_out=n, activation_fn=ReLU),
            FullyConnectedLayer(n_in=n, n_out=n, activation_fn=ReLU),
            SoftmaxLayer(n_in=n, n_out=10)
        ], mini_batch_size)
        net.SGD(expanded_training_data,
                60,
                mini_batch_size,
                0.03,
                validation_data,
                test_data,
                lmbda=0.1)
Ejemplo n.º 5
0
def double_fc_dropout(p0, p1, p2, repetitions):
    """Train `repetitions` dropout networks on the expanded MNIST data.

    p0, p1, p2 are the dropout probabilities of the two FC layers and
    the softmax layer respectively.  Returns the list of trained nets.

    Fixed: the parameters were written outside the print() call (a
    discarded tuple in Python 3), and % was applied to print()'s return
    value (None) — both prints now format correctly.
    """
    expanded_training_data, _, _ = network3.load_data_shared(
        "../data/mnist_expanded.pkl.gz")
    nets = []
    for j in range(repetitions):
        print("\n\nTraining using a dropout network with parameters ",
              p0, p1, p2)
        print("Training with expanded data, run num %s" % j)
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            FullyConnectedLayer(
                n_in=40 * 4 * 4, n_out=1000, activation_fn=ReLU, p_dropout=p0),
            FullyConnectedLayer(
                n_in=1000, n_out=1000, activation_fn=ReLU, p_dropout=p1),
            SoftmaxLayer(n_in=1000, n_out=10, p_dropout=p2)
        ], mini_batch_size)
        net.SGD(expanded_training_data, 40, mini_batch_size, 0.03,
                validation_data, test_data)
        nets.append(net)
    return nets
def expanded_data():
    """Train three Conv+Conv+FC nets on the expanded MNIST data.

    Fixed: converted the Python 2 print statement to a function call so
    the block is valid Python 3 (the rest of the file uses print()).
    """
    expanded_training_data, _, _ = network3.load_data_shared(
        "../data/mnist_expanded.pkl.gz")
    for j in range(3):
        print("Training with expanded data, run num %s" % j)
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100,
                                activation_fn=ReLU),
            SoftmaxLayer(n_in=100, n_out=10)
        ], mini_batch_size)
        net.SGD(expanded_training_data,
                20,
                mini_batch_size,
                0.03,
                validation_data,
                test_data,
                lmbda=0.1)
def shallow():
    """Train three shallow baselines (one 100-unit hidden layer + softmax).

    Fixed: converted the Python 2 print statement to a function call so
    the block is valid Python 3 (the rest of the file uses print()).
    """
    for j in range(3):
        print("A shallow net with 100 hidden neurons")
        net = Network([
            FullyConnectedLayer(n_in=784, n_out=100),
            SoftmaxLayer(n_in=100, n_out=10)
        ], mini_batch_size)
        net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data,
                test_data)
Ejemplo n.º 8
0
def omit_FC():
    """Train three conv-only nets (no FC layer); return the last one.

    Fixed: converted the Python 2 print statement to a function call so
    the block is valid Python 3 (the rest of the file uses print()).
    """
    for j in range(3):
        print("Conv only, no FC")
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2)),
            SoftmaxLayer(n_in=20 * 12 * 12, n_out=10)], mini_batch_size)
        net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data,
                test_data)
    return net
Ejemplo n.º 9
0
def test_drop(mini_batch_size):
    """Train a 3-conv / 2-FC dropout-capable binary classifier on
    100x100 single-channel inputs (all p_dropout set to 0.0 here).

    Fixed: the layer-size computations used true division, which yields
    floats under Python 3 and breaks the integer `n_in`/`input_shape`
    arguments below; floor division (//) keeps them integral and is
    identical for these positive integers under Python 2.
    """
    size = 100
    f_s = 7
    padding = 1
    conv_stride = 1
    pool_stride = 1
    pool_size = 4
    n_f1 = 20
    n_f2 = 40
    n_f3 = 80

    # Output side length after conv: (size - filter + 2*pad)/stride + 1,
    # then after pooling: (conv_out - pool)/pool_stride + 1.
    # NOTE(review): these formulas assume pooling stride 1, but the
    # layers pool with poolsize (4, 4) — confirm the sizes match
    # ConvPoolLayer's actual output shape.
    co1 = ((size - f_s + 2 * padding) // conv_stride) + 1
    po1 = ((co1 - pool_size) // pool_stride) + 1
    print(po1)
    co2 = ((po1 - f_s + 2 * padding) // conv_stride) + 1
    po2 = ((co2 - pool_size) // pool_stride) + 1
    print(po2)
    co3 = ((po2 - f_s + 2 * padding) // conv_stride) + 1
    po3 = ((co3 - pool_size) // pool_stride) + 1
    print(po3)

    layer1 = ConvPoolLayer(input_shape=(mini_batch_size, 1, size, size),
                           filter_shape=(n_f1, 1, f_s, f_s),
                           poolsize=(4, 4),
                           activation_fn=ReLU)

    layer2 = ConvPoolLayer(input_shape=(mini_batch_size, n_f1, po1, po1),
                           filter_shape=(n_f2, n_f1, f_s, f_s),
                           poolsize=(4, 4),
                           activation_fn=ReLU)

    layer3 = ConvPoolLayer(input_shape=(mini_batch_size, n_f2, po2, po2),
                           filter_shape=(n_f3, n_f2, f_s, f_s),
                           poolsize=(4, 4),
                           activation_fn=ReLU)

    layer4 = FullyConnectedLayer(n_in=n_f3 * po3 * po3,
                                 n_out=1000,
                                 activation_fn=ReLU,
                                 p_dropout=0.0)
    layer5 = FullyConnectedLayer(n_in=1000,
                                 n_out=500,
                                 activation_fn=ReLU,
                                 p_dropout=0.0)
    layer6 = SoftmaxLayer(n_in=500, n_out=2, p_dropout=0.0)

    net = Network([layer1, layer2, layer3, layer4, layer5, layer6],
                  mini_batch_size)
    net.SGD(training_data,
            10,
            mini_batch_size,
            0.001,
            validation_data,
            test_data,
            lmbda=0.0)
def digit_recognize_test():
    """Train a small conv net on the Kaggle digit-recognizer data and
    write a submission CSV of predictions.

    Fixed: DataFrame.ix was deprecated and then removed from pandas;
    positional column slicing now uses .iloc.
    """
    # Load the data (sample 20000 training rows).
    train = pd.read_csv('../../input/train.csv').sample(20000)
    print('train: ' + str(train.shape))
    test = pd.read_csv('../../input/test.csv')
    print('test: ' + str(test.shape))
    train.head()
    # train: (2000, 785)
    # test: (28000, 784)

    # Feature matrix: every column after the leading 'label' column.
    X = train.iloc[:, 1:]
    X = X.values
    # Response vector.
    Y = train['label']
    Y = Y.values
    # Training data: first 10000 rows; the remainder is used for validation.
    training_inputs = [x for x in X]
    training_results = [y for y in Y]
    training_data = [training_inputs[:10000], training_results[:10000]]
    validation_data = [training_inputs[10000:], training_results[10000:]]
    print(np.shape(training_results))
    # Test data (unlabeled).
    test_data = test.values
    #predit_inputs = [np.reshape(x, (784, 1)) for x in test_data]

    # Move the data onto the device as Theano shared variables.
    training_data = shared(training_data)
    validation_data = shared(validation_data)
    test_data = theano.shared(np.asarray(test_data,
                                         dtype=theano.config.floatX),
                              borrow=True)

    mini_batch_size = 10
    net = Network([
        ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                      filter_shape=(20, 1, 5, 5),
                      poolsize=(2, 2)),
        ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                      filter_shape=(40, 20, 5, 5),
                      poolsize=(2, 2)),
        FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100),
        SoftmaxLayer(n_in=100, n_out=10)
    ], mini_batch_size)
    predictions = net.SGD_kaggle(training_data, 20, mini_batch_size, 0.1,
                                 validation_data, test_data)
    # NOTE(review): marked "not yet resolved" in the original source.

    # Write the submission file (ImageId, Label).
    digit_preds = pd.Series(predictions)
    image_ids = pd.Series(np.arange(1, len(digit_preds) + 1))
    submission = pd.DataFrame([image_ids, digit_preds]).T
    submission.columns = ['ImageId', 'Label']
    submission.to_csv('../../output/dr_result.csv', index=False, header=True)
    print("finished!")
Ejemplo n.º 11
0
def shallow(n=3, epochs=60):
    """Train `n` shallow nets (one 100-unit hidden layer + softmax).

    Each net trains for `epochs` epochs at learning rate 0.1; the list
    of trained networks is returned.
    """
    trained = []
    for _ in range(n):
        print("A shallow net with 100 hidden neurons")
        hidden = FullyConnectedLayer(n_in=784, n_out=100)
        output = SoftmaxLayer(n_in=100, n_out=10)
        net = Network([hidden, output], mini_batch_size)
        net.SGD(training_data, epochs, mini_batch_size, 0.1,
                validation_data, test_data)
        trained.append(net)
    return trained
Ejemplo n.º 12
0
def basic_conv(n=3, epochs=60):
    """Train `n` Conv+FC networks; return the last trained net.

    Fixed: converted the Python 2 print statement to a function call so
    the block is valid Python 3 (the rest of the file uses print()).
    """
    for j in range(n):
        print("Conv + FC architecture")
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2)),
            FullyConnectedLayer(n_in=20 * 12 * 12, n_out=100),
            SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
        net.SGD(training_data, epochs, mini_batch_size, 0.1,
                validation_data, test_data)
    return net
Ejemplo n.º 13
0
def test_conv(mini_batch_size):
    """Build and train a VGG-style net (five conv-pool stages, two FC
    layers, a 2-way softmax) on `training_data`; returns [net]."""
    nets = []
    net = Network(
        [
            # Stage 0
            # 1 input image of size    = 224 x 224
            # 64 filters of size       = 3 x 3
            # poolsize                 = 2 x 2
            # output of stage 0        = 64 feature maps of 112 x 112
            #   (per the next layer's declared input_shape; assumes a
            #   size-preserving convolution — confirm ConvPoolLayer's
            #   border mode)
            ConvPoolLayer(input_shape=(mini_batch_size, 1, 224, 224),
                          filter_shape=(64, 1, 3, 3),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            # Stage 1: 64 -> 128 feature maps, 112x112 -> 56x56.
            ConvPoolLayer(input_shape=(mini_batch_size, 64, 112, 112),
                          filter_shape=(128, 64, 3, 3),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            # Stage 2: 128 -> 256 feature maps, 56x56 -> 28x28.
            ConvPoolLayer(input_shape=(mini_batch_size, 128, 56, 56),
                          filter_shape=(256, 128, 3, 3),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            # Stage 3: 256 -> 512 feature maps, 28x28 -> 14x14.
            ConvPoolLayer(input_shape=(mini_batch_size, 256, 28, 28),
                          filter_shape=(512, 256, 3, 3),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            # Stage 4: 512 -> 512 feature maps, 14x14 -> 7x7.
            ConvPoolLayer(input_shape=(mini_batch_size, 512, 14, 14),
                          filter_shape=(512, 512, 3, 3),
                          poolsize=(2, 2),
                          activation_fn=ReLU),

            # Classifier head: two fully-connected ReLU layers (dropout
            # disabled) feeding a 2-way softmax.
            FullyConnectedLayer(n_in=512 * 7 * 7,
                                n_out=4096,
                                activation_fn=ReLU,
                                p_dropout=0.0),
            FullyConnectedLayer(
                n_in=4096, n_out=1000, activation_fn=ReLU, p_dropout=0.0),
            SoftmaxLayer(n_in=1000, n_out=2, p_dropout=0.0)
        ],
        mini_batch_size)
    # End of Network Architecture

    net.SGD(training_data, 10, mini_batch_size, 0.01, validation_data,
            test_data)
    nets.append(net)
    return nets
Ejemplo n.º 14
0
def basic_softmax_NN():
    """Single softmax layer mapping 361 board inputs to 361 move outputs,
    trained on parsed Go games."""
    mini_batch_size = 10
    train_data, val_data, test_data = go_parser.parse_games(1000,
                                                            test_percent=0.2,
                                                            val_percent=0.2,
                                                            onehot=False)
    layers = [SoftmaxLayer(n_in=361, n_out=361)]
    net = Network(layers, mini_batch_size)
    net.SGD(shared(train_data), 50, mini_batch_size, 0.1,
            shared(val_data), shared(test_data))
Ejemplo n.º 15
0
def test_0():
    """One fully-connected hidden layer + softmax baseline.

    Reported test accuracy: 97.80%.
    """
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    training_data, validation_data, test_data = network3.load_data_shared()

    mini_batch_size = 10
    layers = [FullyConnectedLayer(n_in=784, n_out=100),
              SoftmaxLayer(n_in=100, n_out=10)]
    net = Network(layers, mini_batch_size)
    net.SGD(training_data, 60, mini_batch_size, 0.1,
            validation_data, test_data)
Ejemplo n.º 16
0
def dbl_conv(activation_fn=sigmoid):
    """Train three Conv+Conv+FC nets using `activation_fn`; return the
    last trained net.

    Fixed: converted the Python 2 print statement to a function call so
    the block is valid Python 3 (the rest of the file uses print()).
    """
    for j in range(3):
        print("Conv + Conv + FC architecture")
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=activation_fn),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=activation_fn),
            FullyConnectedLayer(
                n_in=40 * 4 * 4, n_out=100, activation_fn=activation_fn),
            SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
        net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data,
                test_data)
    return net
Ejemplo n.º 17
0
def test_1():
    """One conv-pool layer + softmax; reported test accuracy 98.48%."""
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    training_data, validation_data, test_data = network3.load_data_shared()

    mini_batch_size = 10
    conv = ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                         filter_shape=(20, 1, 5, 5),
                         poolsize=(2, 2))
    net = Network([conv, SoftmaxLayer(n_in=20 * 12 * 12, n_out=10)],
                  mini_batch_size)
    net.SGD(training_data, 60, mini_batch_size, 0.1,
            validation_data, test_data)
Ejemplo n.º 18
0
def test_ding(mini_batch_size):
    """One conv-pool layer + FC(500) + 2-way softmax on 32x32 inputs.

    Fixed: the layer-size computations used true division, which yields
    floats under Python 3 and breaks the integer `n_in` argument below;
    floor division (//) keeps them integral.  Unused second-conv
    constants and the commented-out second layer were removed.
    """
    size = 32
    f_s = 5
    padding = 1
    conv_stride = 1
    pool_stride = 1
    pool_size = 3
    n_f1 = 20

    # Conv output: (size - filter + 2*pad)/stride + 1; then pooled.
    # NOTE(review): assumes pooling stride 1 — confirm against
    # ConvPoolLayer's actual output shape.
    co1 = ((size - f_s + 2 * padding) // conv_stride) + 1
    po1 = ((co1 - pool_size) // pool_stride) + 1

    layer1 = ConvPoolLayer(input_shape=(mini_batch_size, 1, size, size),
                           filter_shape=(n_f1, 1, f_s, f_s),
                           poolsize=(pool_size, pool_size),
                           activation_fn=ReLU)

    layer3 = FullyConnectedLayer(n_in=n_f1 * po1 * po1,
                                 n_out=500,
                                 activation_fn=ReLU,
                                 p_dropout=0.0)

    layer4 = SoftmaxLayer(n_in=500, n_out=2, p_dropout=0.0)

    net = Network([layer1, layer3, layer4], mini_batch_size)

    # NOTE(review): training_data is also passed in the test_data slot,
    # so "test" accuracy is measured on the training set — confirm this
    # is intentional before relying on the reported numbers.
    net.SGD(training_data,
            50,
            mini_batch_size,
            0.3,
            validation_data,
            training_data,
            lmbda=0.0)
Ejemplo n.º 19
0
def test_small(mini_batch_size):
    """Two conv-pool layers + FC(100) + 2-way softmax on 100x100 inputs;
    trains the net and pickles it to ../data/network.cnn.

    Fixed: (1) the layer-size computations used true division, which
    yields floats under Python 3 and breaks the integer `n_in`/
    `input_shape` arguments; floor division (//) keeps them integral.
    (2) The pickle file is now written via a context manager so it is
    closed even if dumping raises.
    """
    size = 100
    f_s = 5
    padding = 1
    conv_stride = 1
    pool_stride = 1
    pool_size = 3
    n_f1 = 30
    n_f2 = 60

    # Conv output: (size - filter + 2*pad)/stride + 1; then pooled.
    # NOTE(review): assumes pooling stride 1 — confirm against
    # ConvPoolLayer's actual output shape.
    co1 = ((size - f_s + 2 * padding) // conv_stride) + 1
    po1 = ((co1 - pool_size) // pool_stride) + 1
    co2 = ((po1 - f_s + 2 * padding) // conv_stride) + 1
    po2 = ((co2 - pool_size) // pool_stride) + 1

    net = Network([
        ConvPoolLayer(input_shape=(mini_batch_size, 1, size, size),
                      filter_shape=(n_f1, 1, f_s, f_s),
                      poolsize=(pool_size, pool_size),
                      activation_fn=ReLU),
        ConvPoolLayer(input_shape=(mini_batch_size, n_f1, po1, po1),
                      filter_shape=(n_f2, n_f1, f_s, f_s),
                      poolsize=(pool_size, pool_size),
                      activation_fn=ReLU),
        FullyConnectedLayer(n_in=n_f2 * po2 * po2,
                            n_out=100,
                            activation_fn=ReLU,
                            p_dropout=0.0),
        SoftmaxLayer(n_in=100, n_out=2, p_dropout=0.0)
    ], mini_batch_size)

    net.SGD(training_data,
            100,
            mini_batch_size,
            0.01,
            validation_data,
            test_data,
            lmbda=0.0)

    with open('../data/network.cnn', 'wb') as f:
        cPickle.dump(net, f, protocol=2)
Ejemplo n.º 20
0
def test_6():
    """Two conv-pool layers + FC + softmax on an 8x-expanded MNIST set.

    Activation: ReLU; cost uses L2 regularization (lmbda=0.1).
    Reported test accuracy: 99.45% (60 epochs), 99.58% (600 epochs).
    """
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    # Expand the MNIST training set eightfold before loading it.
    src_path = '../../minst-data/data/mnist.pkl.gz'
    dst_path = '../../minst-data/data/mnist_expanded_8.pkl.gz'
    study_note.mnistTest().expand_mnist(src_path=src_path,
                                        dst_path=dst_path,
                                        expand_count=8)

    training_data, validation_data, test_data = \
        network3.load_data_shared(dst_path)
    mini_batch_size = 10

    conv1 = ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU)
    conv2 = ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU)
    hidden = FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100,
                                 activation_fn=ReLU)
    net = Network([conv1, conv2, hidden, SoftmaxLayer(n_in=100, n_out=10)],
                  mini_batch_size)
    net.SGD(training_data, 60, mini_batch_size, 0.03,
            validation_data, test_data, lmbda=0.1)
Ejemplo n.º 21
0
def test_digit(mini_batch_size):
    """Two conv-pool layers + FC(500) + 2-way softmax on 100x100 inputs.

    Fixed: the layer-size computations used true division, which yields
    floats under Python 3 and breaks the integer `n_in`/`input_shape`
    arguments below; floor division (//) keeps them integral and is
    identical for these positive integers under Python 2.
    """
    size = 100
    f_s = 10
    padding = 1
    conv_stride = 1
    pool_stride = 1
    pool_size = 5
    n_f1 = 25
    n_f2 = 50

    # Conv output: (size - filter + 2*pad)/stride + 1; then pooled.
    # NOTE(review): assumes pooling stride 1 — confirm against
    # ConvPoolLayer's actual output shape.
    co1 = ((size - f_s + 2 * padding) // conv_stride) + 1
    po1 = ((co1 - pool_size) // pool_stride) + 1
    co2 = ((po1 - f_s + 2 * padding) // conv_stride) + 1
    po2 = ((co2 - pool_size) // pool_stride) + 1

    net = Network(
        [
            ConvPoolLayer(input_shape=(mini_batch_size, 1, size, size),
                          filter_shape=(n_f1, 1, f_s, f_s),
                          poolsize=(pool_size, pool_size),
                          activation_fn=ReLU),
            ConvPoolLayer(input_shape=(mini_batch_size, n_f1, po1, po1),
                          filter_shape=(n_f2, n_f1, f_s, f_s),
                          poolsize=(pool_size, pool_size),
                          activation_fn=ReLU),
            FullyConnectedLayer(n_in=n_f2 * po2 * po2,
                                n_out=500,
                                activation_fn=ReLU,
                                p_dropout=0.0),
            SoftmaxLayer(n_in=500, n_out=2, p_dropout=0.0)
        ],
        mini_batch_size)

    net.SGD(training_data,
            100,
            mini_batch_size,
            0.01,
            validation_data,
            test_data,
            lmbda=0.0)
Ejemplo n.º 22
0
def test_3():
    """Two conv-pool layers + fully-connected layer + softmax.

    Reported test accuracy: 99.09%.
    """
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    training_data, validation_data, test_data = network3.load_data_shared()
    mini_batch_size = 10

    conv1 = ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2))
    conv2 = ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2))
    hidden = FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100)
    net = Network([conv1, conv2, hidden, SoftmaxLayer(n_in=100, n_out=10)],
                  mini_batch_size)
    net.SGD(training_data, 60, mini_batch_size, 0.1,
            validation_data, test_data)
Ejemplo n.º 23
0
def test_7():
    """Two conv-pool layers + two FC layers + softmax on expanded MNIST.

    Activation: ReLU; cost uses L2 regularization (lmbda=0.1).
    Reported test accuracy: 99.49%.
    """
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    # Expand the training set before loading it.
    expand_mnist.expand_mnist_data()
    dst_path = "../../minst-data/data/mnist_expanded.pkl.gz"

    training_data, validation_data, test_data = \
        network3.load_data_shared(dst_path)
    mini_batch_size = 10

    conv1 = ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU)
    conv2 = ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU)
    fc1 = FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100, activation_fn=ReLU)
    fc2 = FullyConnectedLayer(n_in=100, n_out=100, activation_fn=ReLU)
    net = Network([conv1, conv2, fc1, fc2, SoftmaxLayer(n_in=100, n_out=10)],
                  mini_batch_size)
    net.SGD(training_data, 60, mini_batch_size, 0.03,
            validation_data, test_data, lmbda=0.1)
Ejemplo n.º 24
0
def test_basic(mini_batch_size):
    """Single conv-pool layer + small FC layer + 2-way softmax on
    224x224 single-channel inputs; returns [net]."""
    conv = ConvPoolLayer(input_shape=(mini_batch_size, 1, 224, 224),
                         filter_shape=(64, 1, 3, 3),
                         poolsize=(2, 2),
                         activation_fn=ReLU)
    # Flattened conv output feeds a 40-unit fully-connected layer.
    fc = FullyConnectedLayer(n_in=64 * 112 * 112, n_out=40,
                             activation_fn=ReLU)
    head = SoftmaxLayer(n_in=40, n_out=2)

    net = Network([conv, fc, head], mini_batch_size)
    # End of Network Architecture

    net.SGD(training_data, 10, mini_batch_size, 0.001, validation_data,
            test_data)
    return [net]
def main_03():
    """Train the network3 Conv+Conv+FC baseline, logging wall-clock time."""
    started = time.gmtime(time.time())
    print('training NN3 running....%d:%d:%d' %
          (started.tm_hour, started.tm_min, started.tm_sec))
    from network3 import Network
    from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer

    training_data, validation_data, test_data = network3.load_data_shared()
    mini_batch_size = 10
    layers = [
        ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                      filter_shape=(20, 1, 5, 5),
                      poolsize=(2, 2)),
        ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                      filter_shape=(40, 20, 5, 5),
                      poolsize=(2, 2)),
        FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100),
        SoftmaxLayer(n_in=100, n_out=10),
    ]
    net = network3.Network(layers, mini_batch_size)
    net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data,
            test_data)
    finished = time.gmtime(time.time())
    print('training NN3 running....%d:%d:%d end' %
          (finished.tm_hour, finished.tm_min, finished.tm_sec))
Ejemplo n.º 26
0
# Train a conv net (two conv-pool layers + two sigmoid FC layers with
# dropout 0.1 + softmax) on the expanded QuickDraw data.
expanded_training_data, validation_data, test_data = network3.load_data_shared(
    "../data/quickdraw_expanded.pkl.gz")
mini_batch_size = 10
net = Network([
    ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                  filter_shape=(20, 1, 5, 5),
                  poolsize=(2, 2)),
    ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                  filter_shape=(40, 20, 3, 3),
                  poolsize=(2, 2)),
    FullyConnectedLayer(
        n_in=40 * 5 * 5, n_out=500, activation_fn=sigmoid, p_dropout=0.1),
    FullyConnectedLayer(
        n_in=500, n_out=500, activation_fn=sigmoid, p_dropout=0.1),
    SoftmaxLayer(n_in=500, n_out=10, p_dropout=0.1)
], mini_batch_size)
net.SGD(expanded_training_data, 70, mini_batch_size, 0.01, validation_data,
        test_data)
#### Plot the weights of the first layer
# Load previously saved first-layer filters and render them as a 4x5
# grid of greyscale images with the axes hidden, saved as a PNG.
weights0 = np.load("/home/syrine92/Syrine_Belakaria/weights0.npy")
i = -1
fig, ax = plt.subplots(nrows=4, ncols=5)
for row in ax:
    for col in row:
        col.axes.get_xaxis().set_visible(False)
        col.axes.get_yaxis().set_visible(False)
        # Advance the flat filter index across the grid.
        i = i + 1
        col.imshow(weights0[i][0], cmap='Greys')
fig.savefig('/home/syrine92/Syrine_Belakaria/weights0.png')
#### Plot the weights of the second layer
####Plot the weights of second layer
Ejemplo n.º 27
0
# Conv+Conv+FC baseline from "Neural Networks and Deep Learning":
# two conv-pool layers, a 100-unit fully-connected layer, and a softmax
# output, trained for 60 epochs on the shared MNIST data.
import network3
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
training_data, validation_data, test_data = network3.load_data_shared()
mini_batch_size = 10

net = Network([
        ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28), 
                      filter_shape=(20, 1, 5, 5), 
                      poolsize=(2, 2)),
        ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12), 
                      filter_shape=(40, 20, 5, 5), 
                      poolsize=(2, 2)),
        FullyConnectedLayer(n_in=40*4*4, n_out=100),
        SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)

# 60 epochs, mini-batches of 10, learning rate 0.1.
net.SGD(training_data, 60, mini_batch_size, 0.1, 
            validation_data, test_data)

Ejemplo n.º 28
0
# Single conv-pool layer + FC(100) + softmax, using tanh activations in
# the conv and fully-connected layers instead of the default.
import network3
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
training_data, validation_data, test_data = network3.load_data_shared()
mini_batch_size = 10
net = Network([
    ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                  filter_shape=(20, 1, 5, 5),
                  poolsize=(2, 2),
                  activation_fn=network3.tanh),
    FullyConnectedLayer(
        n_in=20 * 12 * 12, n_out=100, activation_fn=network3.tanh),
    SoftmaxLayer(n_in=100, n_out=10)
], mini_batch_size)
# 60 epochs, mini-batches of 10, learning rate 0.1.
net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
Ejemplo n.º 29
0
# Shallow baseline: one 100-unit fully-connected hidden layer + softmax,
# trained for 60 epochs on the shared MNIST data.
import network3
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer


training_data, validation_data, test_data = network3.load_data_shared()
mini_batch_size = 10
net = Network([FullyConnectedLayer(n_in=784, n_out=100), SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
Ejemplo n.º 30
0
# Import the network classes and the MNIST data set.
import network3
from network3 import ReLU
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
training_data, validation_data, test_data = network3.load_data_shared()
# expanded_training_data, _, _ = network3.load_data_sharaed("../data/mnist_expanded.pkl.gz")
mini_batch_size = 10
# Use convolutional networks to improve the classifier.

# Test 1: a single hidden layer of 100 neurons, mini-batch size 10,
# 60 epochs, learning rate 0.1.
net = Network([
    FullyConnectedLayer(n_in=784, n_out=100),
    SoftmaxLayer(n_in=100, n_out=10)
], mini_batch_size)
net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)

# Test 2: one conv layer (5x5 local receptive fields, 20 feature maps,
# 2x2 pooling) followed by a fully-connected hidden layer.
net = Network([
    ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                  filter_shape=(20, 1, 5, 5),
                  poolsize=(2, 2)),
    FullyConnectedLayer(n_in=20 * 12 * 12, n_out=100),
    SoftmaxLayer(n_in=100, n_out=10)
], mini_batch_size)
net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)

# Test 3: two conv layers (each with 5x5 local receptive fields and 2x2
# pooling) followed by a fully-connected hidden layer.
net = Network([
    ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),