def expanded_data():
    """Train three two-conv-layer ReLU networks on the expanded MNIST set.

    NOTE(review): relies on module-level ``mini_batch_size``,
    ``validation_data`` and ``test_data`` being defined elsewhere in the
    module -- confirm before running.
    """
    expanded_training_data, _, _ = network3.load_data_shared(
        "../data/mnist_expanded.pkl.gz")
    for j in range(3):
        # Fix: was a Python 2 print statement (SyntaxError on Python 3).
        print("Training with expanded data, run num %s" % j)
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100,
                                activation_fn=ReLU),
            SoftmaxLayer(n_in=100, n_out=10)
        ], mini_batch_size)
        # 20 epochs, eta=0.03, L2 regularisation lmbda=0.1.
        net.SGD(expanded_training_data,
                20,
                mini_batch_size,
                0.03,
                validation_data,
                test_data,
                lmbda=0.1)
Ejemplo n.º 2
0
def test_4():
    """Conv-pool x2 + fully-connected + softmax, all ReLU activations,
    with L2 regularisation.  Reported test accuracy: 99.18%.
    """
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    training_data, validation_data, test_data = network3.load_data_shared()
    batch = 10

    # Build the layer stack first, then wrap it in a Network.
    layers = [
        ConvPoolLayer(image_shape=(batch, 1, 28, 28),
                      filter_shape=(20, 1, 5, 5),
                      poolsize=(2, 2), activation_fn=ReLU),
        ConvPoolLayer(image_shape=(batch, 20, 12, 12),
                      filter_shape=(40, 20, 5, 5),
                      poolsize=(2, 2), activation_fn=ReLU),
        FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100, activation_fn=ReLU),
        SoftmaxLayer(n_in=100, n_out=10),
    ]
    network = Network(layers, batch)

    # 60 epochs, eta=0.03, L2 regularisation lmbda=0.1.
    network.SGD(training_data, 60, batch, 0.03,
                validation_data, test_data, lmbda=0.1)
def double_fc_dropout(p0, p1, p2, repetitions):
    """Train ``repetitions`` dropout conv nets on the expanded MNIST data.

    p0/p1 are the dropout probabilities for the two 1000-unit
    fully-connected layers, p2 for the softmax layer.  Returns the list
    of trained networks.

    NOTE(review): relies on module-level ``mini_batch_size``,
    ``validation_data`` and ``test_data`` defined elsewhere.
    """
    expanded_training_data, _, _ = network3.load_data_shared(
        "../data/mnist_expanded.pkl.gz")
    nets = []
    for j in range(repetitions):
        # Fix: were Python 2 print statements (SyntaxError on Python 3).
        print("\n\nTraining using a dropout network with parameters ", p0, p1, p2)
        print("Training with expanded data, run num %s" % j)
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            FullyConnectedLayer(
                n_in=40 * 4 * 4, n_out=1000, activation_fn=ReLU, p_dropout=p0),
            FullyConnectedLayer(
                n_in=1000, n_out=1000, activation_fn=ReLU, p_dropout=p1),
            SoftmaxLayer(n_in=1000, n_out=10, p_dropout=p2)], mini_batch_size)
        net.SGD(expanded_training_data, 40, mini_batch_size, 0.03,
                validation_data, test_data)
        nets.append(net)
    return nets
Ejemplo n.º 4
0
def double_fc_dropout(p0, p1, p2, repetitions):
    """Train ``repetitions`` dropout conv nets on the expanded MNIST data.

    p0/p1 are the dropout probabilities for the two 1000-unit
    fully-connected layers, p2 for the softmax layer.  Returns the list
    of trained networks.

    NOTE(review): relies on module-level ``mini_batch_size``,
    ``validation_data`` and ``test_data`` defined elsewhere.
    """
    expanded_training_data, _, _ = network3.load_data_shared(
        "../data/mnist_expanded.pkl.gz")
    nets = []
    for j in range(repetitions):
        # Bug fix: the original ``print("...")  , p0, p1, p2`` built a
        # discarded tuple and never printed the parameters, and
        # ``print("... %s") % j`` raised TypeError (None % int).
        # The arguments now go inside the print() call.
        print("\n\nTraining using a dropout network with parameters ",
              p0, p1, p2)
        print("Training with expanded data, run num %s" % j)
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            FullyConnectedLayer(
                n_in=40 * 4 * 4, n_out=1000, activation_fn=ReLU, p_dropout=p0),
            FullyConnectedLayer(
                n_in=1000, n_out=1000, activation_fn=ReLU, p_dropout=p1),
            SoftmaxLayer(n_in=1000, n_out=10, p_dropout=p2)
        ], mini_batch_size)
        net.SGD(expanded_training_data, 40, mini_batch_size, 0.03,
                validation_data, test_data)
        nets.append(net)
    return nets
Ejemplo n.º 5
0
def expanded_data_double_fc(n=100):
    """n is the number of neurons in both fully-connected layers.  We'll
    try n=100, 300, and 1000.

    NOTE(review): relies on module-level ``mini_batch_size``,
    ``validation_data`` and ``test_data`` defined elsewhere.
    """
    expanded_training_data, _, _ = network3.load_data_shared(
        "../data/mnist_expanded.pkl.gz")
    for j in range(3):
        # Bug fix: the original applied % OUTSIDE print() --
        # ``print("...") % (n, j)`` raises TypeError (None % tuple).
        print(
            "Training with expanded data, %s neurons in two FC layers, run num %s"
            % (n, j))
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            FullyConnectedLayer(n_in=40 * 4 * 4, n_out=n, activation_fn=ReLU),
            FullyConnectedLayer(n_in=n, n_out=n, activation_fn=ReLU),
            SoftmaxLayer(n_in=n, n_out=10)
        ], mini_batch_size)
        net.SGD(expanded_training_data,
                60,
                mini_batch_size,
                0.03,
                validation_data,
                test_data,
                lmbda=0.1)
Ejemplo n.º 6
0
def test_0():
    """Fully-connected hidden layer + softmax output.
    Reported test accuracy: 97.80%.
    """
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    training_data, validation_data, test_data = network3.load_data_shared()

    batch = 10
    layers = [FullyConnectedLayer(n_in=784, n_out=100),
              SoftmaxLayer(n_in=100, n_out=10)]
    network = Network(layers, batch)
    # 60 epochs at learning rate 0.1.
    network.SGD(training_data, 60, batch, 0.1, validation_data, test_data)
Ejemplo n.º 7
0
def train():
    """Train a small conv net on the 1%-expanded MNIST set and pickle
    the accuracies returned by SGD to disk.
    """
    training_data, validation_data, dummy = network3.load_data_shared(
        filename="data/mnist_1_percent_expanded_10_step.pkl.gz")

    mini_batch_size = 10
    net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2)),
            FullyConnectedLayer(n_in=20*12*12, n_out=100),
            SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
    accuracies = net.SGD(training_data, 10000, mini_batch_size, 0.1, validation_data)

    # Bug fix: pickle output is bytes, so the file must be opened in
    # binary mode ("w" raises TypeError on Python 3).  pickle.dump
    # replaces write(pickle.dumps(...)), and the redundant f.close()
    # inside the with-block is gone -- the context manager closes it.
    with open("result/accuracy_1_percent_expanded_10_step.pkl", "wb") as f:
        pickle.dump(accuracies, f)
Ejemplo n.º 8
0
def test_1():
    """Single conv-pool layer + softmax output.
    Reported test accuracy: 98.48%.
    """
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    training_data, validation_data, test_data = network3.load_data_shared()

    batch = 10
    layers = [
        ConvPoolLayer(image_shape=(batch, 1, 28, 28),
                      filter_shape=(20, 1, 5, 5),
                      poolsize=(2, 2)),
        # 20 feature maps of 12x12 after conv+pool feed the softmax.
        SoftmaxLayer(n_in=20 * 12 * 12, n_out=10),
    ]
    network = Network(layers, batch)
    network.SGD(training_data, 60, batch, 0.1, validation_data, test_data)
Ejemplo n.º 9
0
def sample(p0, p1, p2, repetitions):
    """Train ``repetitions`` deep dropout conv nets on 256x256 face data.

    p0/p1 are the dropout probabilities for the two fully-connected
    layers, p2 for the 7-way softmax layer.  Returns the trained nets.

    NOTE(review): relies on module-level ``mini_batch_size``,
    ``validation_data`` and ``test_data`` defined elsewhere.
    """
    expanded_training_data, _, _ = network3.load_data_shared(
        "../data/face256.pkl.gz")
    nets = []
    for j in range(repetitions):
        # Fix: were Python 2 print statements (SyntaxError on Python 3).
        print("\n\nTraining using a dropout network with parameters ", p0, p1, p2)
        print("Training with expanded data, run num %s" % j)
        # Seven conv-pool stages shrink 256x256 inputs to 240 1x1 maps.
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 256, 256),
                          filter_shape=(20, 1, 3, 3),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 127, 127),
                          filter_shape=(40, 20, 2, 2),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 40, 63, 63),
                          filter_shape=(80, 40, 2, 2),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 80, 31, 31),
                          filter_shape=(100, 80, 2, 2),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 100, 15, 15),
                          filter_shape=(160, 100, 2, 2),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 160, 7, 7),
                          filter_shape=(200, 160, 2, 2),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 200, 3, 3),
                          filter_shape=(240, 200, 2, 2),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            FullyConnectedLayer(
                n_in=240*1*1, n_out=5000, activation_fn=ReLU, p_dropout=p0),
            FullyConnectedLayer(
                n_in=5000, n_out=2000, activation_fn=ReLU, p_dropout=p1),
            SoftmaxLayer(n_in=2000, n_out=7, p_dropout=p2)], mini_batch_size)
        net.SGD(expanded_training_data, 10, mini_batch_size, 0.3,
                validation_data, test_data)
        nets.append(net)
    return nets
Ejemplo n.º 10
0
def test_6():
    """Conv-pool x2 + fully-connected + softmax, ReLU activations and L2
    regularisation, trained on an 8x-expanded MNIST data set.
    Reported test accuracy: 99.45% (60 epochs), 99.58% (600 epochs).
    """
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    # Expand the data set eight-fold before training.
    src_path = '../../minst-data/data/mnist.pkl.gz'
    dst_path = '../../minst-data/data/mnist_expanded_8.pkl.gz'
    study_note.mnistTest().expand_mnist(src_path=src_path,
                                        dst_path=dst_path,
                                        expand_count=8)

    training_data, validation_data, test_data = \
        network3.load_data_shared(dst_path)
    batch = 10

    layers = [
        ConvPoolLayer(image_shape=(batch, 1, 28, 28),
                      filter_shape=(20, 1, 5, 5),
                      poolsize=(2, 2), activation_fn=ReLU),
        ConvPoolLayer(image_shape=(batch, 20, 12, 12),
                      filter_shape=(40, 20, 5, 5),
                      poolsize=(2, 2), activation_fn=ReLU),
        FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100, activation_fn=ReLU),
        SoftmaxLayer(n_in=100, n_out=10),
    ]
    network = Network(layers, batch)

    network.SGD(training_data, 60, batch, 0.03,
                validation_data, test_data, lmbda=0.1)
Ejemplo n.º 11
0
def test_3():
    """Conv-pool x2 + fully-connected + softmax (default activations).
    Reported test accuracy: 99.09%.
    """
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    training_data, validation_data, test_data = network3.load_data_shared()
    batch = 10

    layers = [
        ConvPoolLayer(image_shape=(batch, 1, 28, 28),
                      filter_shape=(20, 1, 5, 5),
                      poolsize=(2, 2)),
        ConvPoolLayer(image_shape=(batch, 20, 12, 12),
                      filter_shape=(40, 20, 5, 5),
                      poolsize=(2, 2)),
        FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100),
        SoftmaxLayer(n_in=100, n_out=10),
    ]
    network = Network(layers, batch)
    network.SGD(training_data, 60, batch, 0.1, validation_data, test_data)
Ejemplo n.º 12
0
def test_7():
    """Conv-pool x2 + two fully-connected layers + softmax, ReLU
    activations and L2 regularisation, trained on expanded data.
    Reported test accuracy: 99.49%.
    """
    name = sys._getframe().f_code.co_name
    print(name + "\n")

    # Expand the MNIST training set first.
    expand_mnist.expand_mnist_data()
    dst_path = "../../minst-data/data/mnist_expanded.pkl.gz"

    training_data, validation_data, test_data = \
        network3.load_data_shared(dst_path)
    batch = 10

    layers = [
        ConvPoolLayer(image_shape=(batch, 1, 28, 28),
                      filter_shape=(20, 1, 5, 5),
                      poolsize=(2, 2), activation_fn=ReLU),
        ConvPoolLayer(image_shape=(batch, 20, 12, 12),
                      filter_shape=(40, 20, 5, 5),
                      poolsize=(2, 2), activation_fn=ReLU),
        FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100, activation_fn=ReLU),
        FullyConnectedLayer(n_in=100, n_out=100, activation_fn=ReLU),
        SoftmaxLayer(n_in=100, n_out=10),
    ]
    network = Network(layers, batch)
    network.SGD(training_data, 60, batch, 0.03,
                validation_data, test_data, lmbda=0.1)
def expanded_data_double_fc(n=100):
    """n is the number of neurons in both fully-connected layers.  We'll
    try n=100, 300, and 1000.

    NOTE(review): relies on module-level ``mini_batch_size``,
    ``validation_data`` and ``test_data`` defined elsewhere; also note
    this redefines a function of the same name earlier in the file.
    """
    expanded_training_data, _, _ = network3.load_data_shared(
        "../data/mnist_expanded.pkl.gz")
    for j in range(3):
        # Fix: was a Python 2 print statement (SyntaxError on Python 3).
        print("Training with expanded data, %s neurons in two FC layers, run num %s" % (n, j))
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                          filter_shape=(20, 1, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                          filter_shape=(40, 20, 5, 5),
                          poolsize=(2, 2),
                          activation_fn=ReLU),
            FullyConnectedLayer(n_in=40 * 4 * 4, n_out=n, activation_fn=ReLU),
            FullyConnectedLayer(n_in=n, n_out=n, activation_fn=ReLU),
            SoftmaxLayer(n_in=n, n_out=10)], mini_batch_size)
        net.SGD(expanded_training_data, 60, mini_batch_size, 0.03,
                validation_data, test_data, lmbda=0.1)
def main_03():
    """Train the two-conv-layer + FC + softmax network and print
    start/end wall-clock times (UTC, H:M:S)."""
    started = time.gmtime(time.time())
    print('training NN3 running....%d:%d:%d' %
          (started.tm_hour, started.tm_min, started.tm_sec))
    from network3 import Network
    from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer

    training_data, validation_data, test_data = network3.load_data_shared()
    batch = 10
    layers = [
        ConvPoolLayer(image_shape=(batch, 1, 28, 28),
                      filter_shape=(20, 1, 5, 5),
                      poolsize=(2, 2)),
        ConvPoolLayer(image_shape=(batch, 20, 12, 12),
                      filter_shape=(40, 20, 5, 5),
                      poolsize=(2, 2)),
        FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100),
        SoftmaxLayer(n_in=100, n_out=10),
    ]
    model = network3.Network(layers, batch)
    model.SGD(training_data, 60, batch, 0.1, validation_data, test_data)

    finished = time.gmtime(time.time())
    print('training NN3 running....%d:%d:%d end' %
          (finished.tm_hour, finished.tm_min, finished.tm_sec))
Ejemplo n.º 15
0
# Load a previously pickled network and run one MNIST test image
# through its feedback2 method.
import network3
test_data = network3.load_data_shared()[2]

# Fix: cPickle only exists on Python 2; fall back to pickle on Python 3.
try:
    import cPickle
except ImportError:
    import pickle as cPickle

# Fix: context manager replaces the manual open/close pair.
with open('net3.p', 'rb') as gg:
    net3 = cPickle.load(gg)

import mnist_loader

training_data, validation_data, test_data = mnist_loader.load_data_wrapper()

imgIndex = 45
# NOTE(review): this reloads the data wrapper a second time; reusing the
# result above would be cheaper, kept as-is for behaviour parity.
testImg = mnist_loader.load_data_wrapper()[2][imgIndex]

# Fix: was a Python 2 print statement (SyntaxError on Python 3).
print(str(net3.feedback2(testImg[0])))
Ejemplo n.º 16
0
        result[0], result[1], result[2], result[3], result[4], result[5],
        result[6], result[7], result[8], result[9]).split(',')
    return result
    #return [float(i) for i in result]


if __name__ == '__main__':
    import network3
    from network3 import Network
    from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
    from network3 import ReLU
    import os, struct, csv
    from collections import Counter

    expanded_training_data, training_data, validation_data, test_data = \
                    network3.load_data_shared(expanded_time=15)

    mini_batch_size = 100
    voters = 1
    vote_box = []
    vote_prob_box = []
    for vote in xrange(voters):
        #expanded_training_data, validation_data, test_data = \
        #                        network3.load_data_shared(expanded_time=10)
        # from book chap6
        #'''
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 29, 29),
                          filter_shape=(6, 1, 17, 17),
                          poolsize=(1, 1),
                          activation_fn=ReLU),
Ejemplo n.º 17
0
# Chapter-6 style experiment: two ReLU conv-pool layers + FC + softmax,
# trained for 30 epochs on the expanded MNIST set with L2 regularisation.
import network3
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
from network3 import ReLU

training_data, validation_data, test_data = network3.load_data_shared()
# Expanded training set loaded from a local pickle -- presumably produced
# by an expand-mnist script; confirm the file exists before running.
expanded_training_data, _, _ = network3.load_data_shared("mnist_expanded.pkl.gz")
mini_batch_size = 10

net = Network([
        # 28x28 input -> 5x5 conv -> 24x24 -> 2x2 pool -> 20 maps of 12x12.
        ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                      filter_shape=(20, 1, 5, 5),
                      poolsize=(2, 2),
                      activation_fn=ReLU),
        # 12x12 -> 5x5 conv -> 8x8 -> 2x2 pool -> 40 maps of 4x4.
        ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                      filter_shape=(40, 20, 5, 5),
                      poolsize=(2, 2),
                      activation_fn=ReLU),
        FullyConnectedLayer(n_in=40*4*4, n_out=100, activation_fn=ReLU),
        SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
# eta=0.03, L2 regularisation lmbda=0.1.
net.SGD(expanded_training_data, 30, mini_batch_size, 0.03,
            validation_data, test_data, lmbda=0.1)
Ejemplo n.º 18
0
# Train a conv + FC + 25-way softmax network; winsound is imported for an
# (currently commented-out) audible completion beep on Windows.
import network3
from network3 import Network
from network3 import SoftmaxLayer, FullyConnectedLayer, ConvPoolLayer
import winsound



mini_batch_size = 10

# Layers are built individually, then assembled: conv-pool front end
# feeding a 100-unit FC layer and a 25-class softmax output.
f = FullyConnectedLayer(n_in=20 * 12 * 12, n_out=100)
s = SoftmaxLayer(n_in=100, n_out=25)
c = ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                  filter_shape=(20, 1, 5, 5),
                  poolsize=(2, 2))

net = Network([c, f, s], mini_batch_size)

# Fix: were Python 2 print statements (SyntaxError on Python 3).
print("\n Network Created. Now Loading Data. Please be patient. \n")

train, valid, test = network3.load_data_shared()

print("\n \n \nData Sucessfully Loaded \n")
#winsound.Beep(2100, 1000)

net.SGD(train, 30, mini_batch_size, 0.1, valid, test)
Ejemplo n.º 19
0
import mnist_loader
import network3
# NameError fix: Network, the layer classes, ReLU, mini_batch_size,
# validation_data and test_data were referenced below but never defined
# in this snippet; the definitions mirror the sibling snippets in this
# file (TODO confirm against the original source).
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
from network3 import ReLU


expanded_training_data, _, _ = network3.load_data_shared(
        "../data/mnist_expanded.pkl.gz")
_, validation_data, test_data = network3.load_data_shared()
mini_batch_size = 10


# Two ReLU conv-pool layers, two 1000-unit dropout FC layers, and a
# dropout softmax output (p_dropout=0.5 throughout).
net = Network([
        ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                      filter_shape=(20, 1, 5, 5),
                      poolsize=(2, 2),
                      activation_fn=ReLU),
        ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                      filter_shape=(40, 20, 5, 5),
                      poolsize=(2, 2),
                      activation_fn=ReLU),
        FullyConnectedLayer(
            n_in=40*4*4, n_out=1000, activation_fn=ReLU, p_dropout=0.5),
        FullyConnectedLayer(
            n_in=1000, n_out=1000, activation_fn=ReLU, p_dropout=0.5),
        SoftmaxLayer(n_in=1000, n_out=10, p_dropout=0.5)],
        mini_batch_size)


net.SGD(expanded_training_data, 40, mini_batch_size, 0.03,
            validation_data, test_data)


# 调用SGD随机梯度下降算法 (使用cross-entropy函数)
Ejemplo n.º 20
0
# Loads an elastic/rotation-expanded MNIST pickle for network3 training.
# The commented-out blocks below are earlier network2-based experiments
# kept for reference.
# net.large_weight_initializer()
# net.SGD(training_data, 30, 10, 0.5, evaluation_data=test_data, monitor_evaluation_accuracy=True)

# import network2

# net = network2.Network([784, 100, 10], cost=network2.CrossEntropyCost)
# net.large_weight_initializer()
# print 'begin to learn'
# net.SGD(training_data, 60, 10, 0.5, lmbda=5.0,evaluation_data=validation_data,monitor_evaluation_accuracy=True)
# print 'finish learn'

import network3
from network3 import ReLU
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
# NOTE(review): path ends in .pkl, not .pkl.gz like the other snippets --
# confirm load_data_shared accepts an uncompressed pickle here.
training_data, validation_data, test_data = network3.load_data_shared(
    "../data1/elastic_rotate_expand.pkl")
mini_batch_size = 10
# expanded_training_data, _, _ = network3.load_data_shared(
#          "../data/mnist_expanded.pkl.gz")
# net = Network([
#         FullyConnectedLayer(n_in=784, n_out=100),
#         SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
# net.SGD(training_data, 60, mini_batch_size, 0.1,
#             validation_data, test_data)
net = Network([
    ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                  filter_shape=(20, 1, 5, 5),
                  poolsize=(2, 2),
                  activation_fn=ReLU),
    ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
Ejemplo n.º 21
0
# Data-augmentation demo: randomly translate one MNIST digit by up to one
# pixel horizontally and vertically, zeroing the pixels that wrap around.
import network3
import numpy as np
import scipy
import matplotlib.pyplot as plt
import PIL

training_data, _, _ = network3.load_data_shared()
training_x, training_y = training_data
x = training_x.get_value()[0]  # vector corresponding to a 5
x_img = np.reshape(x, (-1, 28))  # recognizable 5
y = training_y.eval()[0]  # label: 5


#### Translate
# Randomly translate the image by -1, 0 or 1 pixel right and down
x_tr = np.random.randint(-1, 2)  # x > 0 means translated to the right
print("translation: {} pixel right".format(x_tr))
y_tr = np.random.randint(-1, 2)  # y > 0 means translated down
print("translation: {} pixel down".format(y_tr))

if x_tr != 0:
    # np.roll wraps pixels around the edge, so the wrapped column(s)
    # must be zeroed out afterwards to simulate a true translation.
    x_img = np.roll(x_img, x_tr, 1)  # 1: x axis
    if x_tr > 0:
        # The image is to be translated to the right
         x_img[:, 0:x_tr] = np.zeros((28, x_tr))
    else:
        # The image is to be translated to the left
        x_img[:, 28+x_tr:] = np.zeros((28, -x_tr))

if y_tr != 0:
    # NOTE(review): unlike the horizontal case, no zeroing of the wrapped
    # rows follows here -- the rest of this snippet appears truncated.
    x_img = np.roll(x_img, y_tr, 0)  # 0: y axis
Ejemplo n.º 22
0
    result = "{:.3},{:.3},{:.3},{:.3},{:.3},{:.3},{:.3},{:.3},{:.3},{:.3}".format(
        result[0],result[1],result[2],result[3],result[4],result[5],result[6],result[7],result[8],result[9]).split(',')
    return result
    #return [float(i) for i in result]


if __name__ == '__main__':
    import network3
    from network3 import Network
    from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
    from network3 import ReLU
    import os, struct, csv
    from collections import Counter

    expanded_training_data, training_data, validation_data, test_data = \
                    network3.load_data_shared(expanded_time=15)
    
    mini_batch_size = 100
    voters = 1
    vote_box = []
    vote_prob_box = []
    for vote in xrange(voters):
        #expanded_training_data, validation_data, test_data = \
        #                        network3.load_data_shared(expanded_time=10)
        # from book chap6
        #'''
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 29, 29), 
                          filter_shape=(6, 1, 17, 17), poolsize=(1, 1), 
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 6, 13, 13), 
# Train a two-conv-layer ReLU network with one dropout FC layer on a
# bird-image data set, then plot the training cost curve.
import network3
import matplotlib.pyplot as plt
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
from network3 import ReLU

training_data, validation_data, test_data = \
            network3.load_data_shared('../data/bird_image_full.pkl.gz')

mini_batch_size = 10

net = Network([
        ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                      filter_shape=(20, 1, 5, 5),
                      poolsize=(2,2),
                      activation_fn=ReLU),
        ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                      filter_shape=(40, 20, 5, 5),
                      poolsize=(2,2),
                      activation_fn=ReLU),
        FullyConnectedLayer(n_in=40*4*4, n_out=100, activation_fn=ReLU, p_dropout=0.5),        
        SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)

# NOTE(review): this SGD variant returns (costs, train_acc) and takes a
# diminishing_lr flag -- a customised network3; verify the local API.
costs, train_acc = net.SGD(training_data, 200, mini_batch_size, 0.1, validation_data, test_data, lmbda=0.4,
        diminishing_lr=True) 
# Costs were apparently recorded every 1000 iterations.
it = range(0, 1000 * len(costs), 1000)
plt.plot(it, costs)
plt.ylabel('cost')
plt.show()

#net.save("out.txt")
Ejemplo n.º 24
0
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 16 12:22:00 2017

@author: Syrine Belakaria

Trains a two-conv-layer network with sigmoid dropout FC layers on an
expanded QuickDraw data set.
"""
import network3
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
from network3 import ReLU
from network3 import sigmoid
import numpy as np
import matplotlib.pyplot as plt

expanded_training_data, validation_data, test_data = network3.load_data_shared(
    "../data/quickdraw_expanded.pkl.gz")
mini_batch_size = 10
net = Network([
    ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                  filter_shape=(20, 1, 5, 5),
                  poolsize=(2, 2)),
    # 3x3 filters here (not 5x5), giving 40 maps of 5x5 after pooling.
    ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                  filter_shape=(40, 20, 3, 3),
                  poolsize=(2, 2)),
    FullyConnectedLayer(
        n_in=40 * 5 * 5, n_out=500, activation_fn=sigmoid, p_dropout=0.1),
    FullyConnectedLayer(
        n_in=500, n_out=500, activation_fn=sigmoid, p_dropout=0.1),
    SoftmaxLayer(n_in=500, n_out=10, p_dropout=0.1)
], mini_batch_size)
# NOTE(review): this call is cut off mid-argument-list in this file --
# the trailing arguments (test_data, ...) are missing from the snippet.
net.SGD(expanded_training_data, 70, mini_batch_size, 0.01, validation_data,
Ejemplo n.º 25
0
# Builds a two-conv-layer ReLU network on a 10% subset of the data; the
# commented block above documents how the subsetting was once done by hand.
#     m_test = test_data[0].shape[0]
#     ind_test = list(range(m_test))
#     random.shuffle(ind_test)
#     ind_test = ind_test[0:m_test // percentage]

#     training_data = (training_data[0][ind_train, :],
#                      training_data[1][ind_train])
#     validation_data = (validation_data[0][ind_val, :],
#                        validation_data[1][ind_val])
#     test_data = (test_data[0][ind_test, :],
#                  test_data[1][ind_test])

#     return training_data, validation_data, test_data

# NOTE(review): load_data_shared is called with percentage=10 here -- a
# customised signature; verify against the local network3 module.
training_data, validation_data, test_data = network3.load_data_shared(
    percentage=10)
mini_batch_size = 30

# NOTE(review): uses input_shape= where other snippets use image_shape= --
# confirm this matches the local ConvPoolLayer signature.
net = Network([
    ConvPoolLayer(input_shape=(mini_batch_size, 1, 28, 28),
                  filter_shape=(20, 1, 5, 5),
                  poolsize=(2, 2),
                  activation_fn=ReLU),
    ConvPoolLayer(input_shape=(mini_batch_size, 20, 12, 12),
                  filter_shape=(40, 20, 5, 5),
                  poolsize=(2, 2),
                  activation_fn=ReLU),
    FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100, activation_fn=ReLU),
    SoftmaxLayer(n_in=100, n_out=10)
], mini_batch_size)
Ejemplo n.º 26
0
# Same chapter-6 experiment as earlier in this file: two ReLU conv-pool
# layers + FC + softmax, 30 epochs on the expanded MNIST data, lmbda=0.1.
import network3
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
from network3 import ReLU

training_data, validation_data, test_data = network3.load_data_shared()
expanded_training_data, _, _ = network3.load_data_shared("mnist_expanded.pkl.gz")
mini_batch_size = 10

net = Network(
    [
        ConvPoolLayer(
            image_shape=(mini_batch_size, 1, 28, 28), filter_shape=(20, 1, 5, 5), poolsize=(2, 2), activation_fn=ReLU
        ),
        ConvPoolLayer(
            image_shape=(mini_batch_size, 20, 12, 12), filter_shape=(40, 20, 5, 5), poolsize=(2, 2), activation_fn=ReLU
        ),
        FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100, activation_fn=ReLU),
        SoftmaxLayer(n_in=100, n_out=10),
    ],
    mini_batch_size,
)
net.SGD(expanded_training_data, 30, mini_batch_size, 0.03, validation_data, test_data, lmbda=0.1)
Ejemplo n.º 27
0
# Dropout conv net built entirely through the nw (network3) namespace.
import network3 as nw


# NOTE(review): despite the variable name, this loads the plain
# mnist.pkl.gz, not an expanded set -- confirm this is intentional.
expanded_training_data, validation_data, test_data = nw.load_data_shared(
        "../data/mnist.pkl.gz")

mini_batch_size = 30

net = nw.Network([
        nw.ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28), 
                      filter_shape=(20, 1, 5, 5), 
                      poolsize=(2, 2), 
                      activation_fn=nw.ReLU),
        nw.ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12), 
                      filter_shape=(40, 20, 5, 5), 
                      poolsize=(2, 2), 
                      activation_fn=nw.ReLU),
        # Two 1000-unit FC layers and the softmax all use p_dropout=0.5.
        nw.FullyConnectedLayer(
            n_in=40*4*4, n_out=1000, activation_fn=nw.ReLU, p_dropout=0.5),
        nw.FullyConnectedLayer(
            n_in=1000, n_out=1000, activation_fn=nw.ReLU, p_dropout=0.5),
        nw.SoftmaxLayer(n_in=1000, n_out=10, p_dropout=0.5)], 
        mini_batch_size)

# 60 epochs, eta=0.03, L2 regularisation lmbda=0.1.
net.SGD(expanded_training_data, 60, mini_batch_size, 0.03, 
            validation_data, test_data, lmbda=0.1)
Ejemplo n.º 28
0
# Minimal baseline: a single 100-unit FC hidden layer plus softmax,
# trained for 60 epochs at eta=0.1.
import network3
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer


training_data, validation_data, test_data = network3.load_data_shared()
mini_batch_size = 10
net = Network([FullyConnectedLayer(n_in=784, n_out=100), SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
Ejemplo n.º 29
0
#!python35
# -*- coding: utf-8 -*-
# Two-conv-layer network submitted via a Kaggle-specific SGD variant.
import network3
from network3 import Network, ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer  # softmax plus log-likelihood cost is more common in modern image classification networks.

# read data:
training_data, validation_data, test_data = network3.load_data_shared(
    '../data/mnist.pkl.gz')
# mini-batch size:
mini_batch_size = 10
net = Network([
    ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                  filter_shape=(20, 1, 5, 5),
                  poolsize=(2, 2)),
    ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                  filter_shape=(40, 20, 5, 5),
                  poolsize=(2, 2)),
    FullyConnectedLayer(n_in=40 * 4 * 4, n_out=100),
    SoftmaxLayer(n_in=100, n_out=10)
], mini_batch_size)
# NOTE(review): SGD_kaggle is a customised method not present in the
# stock network3 module -- verify it exists locally.
net.SGD_kaggle(training_data, 60, mini_batch_size, 0.1, validation_data,
               test_data)