Example #1
def classification(neuro_obj = None, epoch = 100000, num_class = 10):

    print 'initialize Neural Network.'
    if neuro_obj: nn_obj = neuro_obj
    else        : nn_obj = mln.Mln().make_neuralnet([28*28, 1000, num_class], ['sigmoid', 'softmax'], 0.01) # mnist classification
#    else        : nn_obj = mln.Mln().make_neuralnet([28*28, 1000, num_class], ['sigmoid', 'softmax'], 0.15) # mnist classification

    print "dump obj..."
    dp.obj_dump(nn_obj, './default-classification.pkl')

    print 'read training data and label.'
    training_data = dp.obj_load_gzip('../../mnist/mnist-training_all.pkl.gz')

    print 'data      size : ', len(training_data)
    print 'label     size : ', len(training_data) # each element is an (input, label) pair, so the counts match

    data_num = len(training_data)

    print '--start--'
    print '@@ Learn Character Recognition @@'
    for j in range(0, epoch):
        prg.show_progressxxx(j+1, epoch)
    
        i = np.random.randint(data_num)
        nn_obj.learn(training_data[i][0], training_data[i][1])

    prg.end_progress()

    print "dump obj..."
    dp.obj_dump(nn_obj, './learn-classification.pkl')
    
    return nn_obj
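The dumped network can be reloaded and queried afterwards. A minimal sketch, assuming `dp.obj_dump` writes an ordinary pickle (the `dump` module's source is not shown here); the input and label below are random stand-ins, not real MNIST data:

import pickle

import numpy as np

# reload the trained network; assumes dp.obj_dump produced a standard pickle
with open('./learn-classification.pkl', 'rb') as f:
    nn_obj = pickle.load(f)

x = np.random.rand(28 * 28, 1)   # stand-in for a flattened 28x28 image
d = np.zeros((10,))              # stand-in one-hot label, unused for prediction

nn_obj.feedforward(x, d)         # same call pattern as Examples #11 and #12
print("predicted class: %d" % np.argmax(nn_obj.node[-1].z))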
Example #2
def binary_classification(neuro_obj=None, epoch=50000):

    if neuro_obj:
        nn_obj = neuro_obj
    else:
        nn_obj = mln.Mln().make_neuralnet([2, 3, 1], ["sigmoid", "sigmoid_binary"], eta=0.15)  # XOR classification

    # use weight decay.
    #    nn_obj.use_weight_decay(0.01)   #
    #    nn_obj.use_weight_decay(0.001)  #
    nn_obj.use_weight_decay(0.0001)  #
    #    nn_obj.unuse_weight_decay()
    # use momentum.
    #    nn_obj.use_momentum(0.1)
    nn_obj.use_momentum(0.5)
    #    nn_obj.use_momentum(0.9)
    #    nn_obj.unuse_momentum()

    training_data = [
        (np.array([[0.0], [0.0]]), np.array([0.0])),
        (np.array([[0.0], [1.0]]), np.array([1.0])),
        (np.array([[1.0], [0.0]]), np.array([1.0])),
        (np.array([[1.0], [1.0]]), np.array([0.0])),
    ]

    # online learning: feed each training sample to the NN sequentially.
    dp.obj_dump(nn_obj, "./default-binary-classification.pkl")

    print "-----weight weight-----:"
    print [item.w for item in nn_obj.weight]
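The loop that uses `epoch` is cut off by the listing; a minimal online-learning sketch of what presumably follows, patterned on the loops of Examples #10 and #11 (the dump path at the end is a guess):

    for i in range(0, epoch):
        prg.show_progressxxx(i + 1, epoch)

        # online learning: present each XOR pattern to the network in turn
        for x, d in training_data:
            nn_obj.learn(x, d)
    prg.end_progress()

    dp.obj_dump(nn_obj, './learn-binary-classification.pkl')  # hypothetical path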
Example #3
def fitting(neuro_obj=None, epoch=50000, minibatch_size=1):

    if neuro_obj:
        nn_obj = neuro_obj
    else:
        nn_obj = mln.Mln().make_neuralnet([2, 3, 1], ["sigmoid", "sigmoid"], eta=0.15)  # XOR fitting

    # use weight decay.
    #    nn_obj.use_weight_decay(0.01)     # unlearnable
    #    nn_obj.use_weight_decay(0.001)    # unlearnable
    nn_obj.use_weight_decay(0.0001)  # learnable
    #    nn_obj.unuse_weight_decay()
    # use momentum.
    #    nn_obj.use_momentum(0.1)
    nn_obj.use_momentum(0.5)
    #    nn_obj.use_momentum(0.9)
    #    nn_obj.unuse_momentum()

    training_data = [
        (np.array([[0.0], [0.0]]), np.array([0.0])),
        (np.array([[0.0], [1.0]]), np.array([1.0])),
        (np.array([[1.0], [0.0]]), np.array([1.0])),
        (np.array([[1.0], [1.0]]), np.array([0.0])),
    ]

    # mini-batch learning: feed small batches of training samples to the NN sequentially.
    dp.obj_dump(nn_obj, "./default-fitting-batch.pkl")

    print "-----weight weight-----:"
    print [item.w for item in nn_obj.weight]
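As in Example #2, the training loop is truncated here; a mini-batch sketch patterned on Example #12 (`import random` is assumed, as in the MNIST examples):

    mb_size = minibatch_size
    for i in range(0, epoch):
        prg.show_progressxxx(i + 1, epoch)

        # draw a random mini-batch of the four XOR patterns (mb_size <= 4)
        batch = random.sample(training_data, mb_size)
        x = [item[0] for item in batch]
        d = [item[1] for item in batch]

        nn_obj.batch_learn(x, d, mb_size)
    prg.end_progress()

    dp.obj_dump(nn_obj, './learn-fitting-batch.pkl')  # path as in Example #12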
Example #4
def classification(neuro_obj=None, epoch=100000, minibatch_size=1):
    num_class = 10

    print "initialize Neural Network."
    if neuro_obj:
        nn_obj = neuro_obj
    else:
        nn_obj = mln.Mln().make_neuralnet(
            [28 * 28, 1000, num_class], ["sigmoid", "softmax"], 0.15
        )  # mnist classification
    #    else        : nn_obj = mln.Mln().make_neuralnet([28*28, 1000, num_class], ['sigmoid', 'softmax'], 0.01) # mnist classification

    # use weight decay.
    #    nn_obj.use_weight_decay(0.01)   # unlearnable
    #    nn_obj.use_weight_decay(0.001)  # learnable
    nn_obj.use_weight_decay(0.0001)  # learnable
    #    nn_obj.unuse_weight_decay()
    # use momentum.
    #    nn_obj.use_momentum(0.1)
    nn_obj.use_momentum(0.5)
    #    nn_obj.use_momentum(0.9)
    #    nn_obj.unuse_momentum()

    print "dump obj..."
    dp.obj_dump(nn_obj, "./default-classification-batch.pkl")

    print "read training data and label."
    training_data = dp.obj_load_gzip("../../mnist/mnist-training_all.pkl.gz")

    print "data      size : ", len(training_data)
    print "label     size : ", len(training_data)
    print "minibatch size : ", minibatch_size

    data_num = len(training_data)

    print "--start--"
    print "@@ Learn Character Recognition @@"
    mb_size = minibatch_size
    for i in range(0, epoch):
        prg.show_progressxxx(i + 1, epoch)

        # draw a random mini-batch directly (mb_size <= data_num); this also
        # avoids shadowing the epoch counter i with a second inner loop
        batch = random.sample(training_data, mb_size)
        x = [item[0] for item in batch]
        d = [item[1] for item in batch]

        nn_obj.batch_learn(x, d, mb_size)
    prg.end_progress()

    print "dump obj..."
    dp.obj_dump(nn_obj, "./learn-classification-batch.pkl")

    return nn_obj
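The `Mln` internals are not shown in this listing, so the following is only the textbook update that the `use_weight_decay` / `use_momentum` switches presumably map to, not necessarily the library's implementation: classical momentum plus L2 weight decay on a plain SGD step.

import numpy as np

def sgd_step(w, grad, velocity, eta=0.15, lam=0.0001, mu=0.5):
    # velocity accumulates past gradients (momentum coefficient mu);
    # the lam * w term is L2 weight decay pulling weights toward zero
    velocity = mu * velocity - eta * (grad + lam * w)
    return w + velocity, velocity

w = np.zeros((3, 2))
v = np.zeros_like(w)
grad = np.ones_like(w)               # stand-in gradient
w, v = sgd_step(w, grad, v)
print(w)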
Example #5
def classification(neuro_obj=None, epoch=100000, num_class=10):

    print "initialize Neural Network."
    if neuro_obj:
        nn_obj = neuro_obj
    else:
        nn_obj = mln.Mln().make_neuralnet(
            [28 * 28, 1000, num_class], ["sigmoid", "softmax"], 0.01
        )  # mnist classification
    #    else        : nn_obj = mln.Mln().make_neuralnet([28*28, 1000, num_class], ['sigmoid', 'softmax'], 0.15) # mnist classification

    # use weight decay.
    #    nn_obj.use_weight_decay(0.01)   #
    #    nn_obj.use_weight_decay(0.001)  #
    #    nn_obj.use_weight_decay(0.0001) # 20.02% error, with momentum 0.5.
    nn_obj.use_weight_decay(0.00001)  #  8.45% error, with momentum 0.5.
    #    nn_obj.unuse_weight_decay()
    # use momentum.
    #    nn_obj.use_momentum(0.1)
    nn_obj.use_momentum(0.5)
    #    nn_obj.use_momentum(0.9)
    #    nn_obj.unuse_momentum()

    print "dump obj..."
    dp.obj_dump(nn_obj, "./default-classification.pkl")

    print "read training data and label."
    training_data = dp.obj_load_gzip("../../mnist/mnist-training_all.pkl.gz")

    print "data      size : ", len(training_data)
    print "label     size : ", len(training_data)

    data_num = len(training_data)

    print "--start--"
    print "@@ Learn Character Recognition @@"
    for j in range(0, epoch):
        prg.show_progressxxx(j + 1, epoch)

        i = np.random.randint(data_num)
        nn_obj.learn(training_data[i][0], training_data[i][1])

    prg.end_progress()

    print "dump obj..."
    dp.obj_dump(nn_obj, "./learn-classification.pkl")

    return nn_obj
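The error rates quoted in the weight-decay comments above can be reproduced with a loop like the one below; a sketch only, since the test-set path and its (input, one-hot label) format are assumptions here.

nn_obj = classification()
test_data = dp.obj_load_gzip('../../mnist/mnist-test_all.pkl.gz')  # hypothetical path

errors = 0
for x, d in test_data:
    nn_obj.feedforward(x, d)         # same call pattern as Examples #11 and #12
    if np.argmax(nn_obj.node[-1].z) != np.argmax(d):
        errors += 1
print("error rate: %.2f%%" % (100.0 * errors / len(test_data)))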
Example #6
def fitting(neuro_obj = None, epoch = 50000):

    if neuro_obj: nn_obj = neuro_obj
    else        : nn_obj = mln.Mln().make_neuralnet([2, 3, 1], ['sigmoid', 'sigmoid'], eta = 0.15) # XOR fitting

    training_data = [
        (np.array([[0.], [0.]]), np.array([0.])),
        (np.array([[0.], [1.]]), np.array([1.])),
        (np.array([[1.], [0.]]), np.array([1.])),
        (np.array([[1.], [1.]]), np.array([0.])),
    ]

    # online learning: feed each training sample to the NN sequentially.
    dp.obj_dump(nn_obj, './default-fitting.pkl')

    print "-----weights-----:"
    print [item.w for item in nn_obj.weight]
Example #7
def binary_classification(neuro_obj = None, epoch = 50000, minibatch_size = 1):
 
    if neuro_obj: nn_obj = neuro_obj
    else        : nn_obj = mln.Mln().make_neuralnet([2, 3, 1], ['sigmoid', 'sigmoid_binary'], eta = 0.15) # XOR classification

    training_data = [
        (np.array([[0.], [0.]]), np.array([0.])),
        (np.array([[0.], [1.]]), np.array([1.])),
        (np.array([[1.], [0.]]), np.array([1.])),
        (np.array([[1.], [1.]]), np.array([0.])),
    ]

    # online learning: feed each training sample to the NN sequentially.
    dp.obj_dump(nn_obj, './default-binary-classification-batch.pkl')

    print "-----weights-----:"
    print [item.w for item in nn_obj.weight]
Example #8
def fitting(neuro_obj = None, epoch = 50000, minibatch_size = 1):

    if neuro_obj: nn_obj = neuro_obj
    else        : nn_obj = mln.Mln().make_neuralnet([2, 3, 1], ['sigmoid', 'sigmoid'], eta = 0.15) # XOR fitting

    training_data = [
        (np.array([[0.], [0.]]), np.array([0.])),
        (np.array([[0.], [1.]]), np.array([1.])),
        (np.array([[1.], [0.]]), np.array([1.])),
        (np.array([[1.], [1.]]), np.array([0.])),
    ]

    # mini-batch learning: feed small batches of training samples to the NN sequentially.
    dp.obj_dump(nn_obj, './default-fitting-batch.pkl')

    print "-----weights-----:"
    print [item.w for item in nn_obj.weight]
Example #9
def classification(neuro_obj = None, epoch = 100000, minibatch_size = 1):
    num_class = 10

    print 'initialize Neural Network.'
    if neuro_obj: nn_obj = neuro_obj
    else        : nn_obj = mln.Mln().make_neuralnet([28*28, 1000, num_class], ['sigmoid', 'softmax'], 0.15) # mnist classification
#    else        : nn_obj = mln.Mln().make_neuralnet([28*28, 1000, num_class], ['sigmoid', 'softmax'], 0.01) # mnist classification

    print "dump obj..."
    dp.obj_dump(nn_obj, './default-classification-batch.pkl')

    print 'read training data and label.'
    training_data = dp.obj_load_gzip('../../mnist/mnist-training_all.pkl.gz')

    print 'data      size : ', len(training_data)
    print 'label     size : ', len(training_data) # each element is an (input, label) pair, so the counts match
    print "minibatch size : ", minibatch_size

    data_num = len(training_data)

    print '--start--'
    print '@@ Learn Character Recognition @@'
    mb_size = minibatch_size
    for i in range(0, epoch):
        prg.show_progressxxx(i+1, epoch)

        # draw a random mini-batch directly (mb_size <= data_num); this also
        # avoids shadowing the epoch counter i with a second inner loop
        batch = random.sample(training_data, mb_size)
        x = [item[0] for item in batch]
        d = [item[1] for item in batch]

        nn_obj.batch_learn(x, d, mb_size)
    prg.end_progress()

    print "dump obj..."
    dp.obj_dump(nn_obj, './learn-classification-batch.pkl')
    
    return nn_obj
Example #10
import time
import numpy as np
import Mln as mln
import dump as dp
import progress as prg

#from sklearn.preprocessing import LabelBinarizer

start = time.time()

# multilayer perceptron
learning_count = 50000
neuro_obj = mln.Mln().make_neuralnet([2, 3, 1], ['sigmoid', 'sigmoid'], 0.01, solved = 'classification')

# Initialization
dp.obj_dump(neuro_obj, './default-br.dump')

# XOR input/output data
input_data = [[0., 0.], [0.,  1.], [ 1., 0.], [ 1.,  1.]]
teach_data = [    [0.],      [1.],      [1.],       [0.]]

data_num = len(input_data)

print '--start--'
print '@@ Learn XOR @@'
for j in range(0, learning_count):
    prg.show_progressxxx(j+1, learning_count)
    
    for i in range(0, 4):
        neuro_obj.learn(input_data[i], teach_data[i])
prg.end_progress()
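The script above trains but never reads the result back, and its `start` timestamp is never used; a small follow-up sketch mirroring the TEST block of Example #11:

print('@@@@@TEST@@@@@')
for i in range(0, 4):
    neuro_obj.feedforward(input_data[i], teach_data[i])
    print(neuro_obj.node[-1].z)

print('elapsed : %.1f sec' % (time.time() - start))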
Example #11
def fitting(neuro_obj = None, epoch = 50000):

    if neuro_obj: nn_obj = neuro_obj
    else        : nn_obj = mln.Mln().make_neuralnet([2, 3, 1], ['sigmoid', 'sigmoid'], eta = 0.15) # XOR fitting

    training_data = [
        (np.array([[0.], [0.]]), np.array([0.])),
        (np.array([[0.], [1.]]), np.array([1.])),
        (np.array([[1.], [0.]]), np.array([1.])),
        (np.array([[1.], [1.]]), np.array([0.])),
    ]

    # online learning: feed each training sample to the NN sequentially.
    dp.obj_dump(nn_obj, './default-fitting.pkl')

    print "-----weights-----:"
    print [item.w for item in nn_obj.weight]
    print "-----biases-----:"
    print [item.b for item in nn_obj.weight]
    
    for i in range(0, epoch):
        prg.show_progressxxx(i+1, epoch)
        
        for j in range(0, 4):
            nn_obj.learn(training_data[j][0], training_data[j][1])
    prg.end_progress()
            
    dp.obj_dump(nn_obj, './learn-fitting.pkl')

    print "@@@@@TEST@@@@@"
    # print the network output for each of the four XOR patterns
    for x, d in training_data:
        nn_obj.feedforward(x, d)
        print nn_obj.node[-1].z
    
    print "@@@@@END@@@@@"
    print "-----weights-----:"
    print [item.w for item in nn_obj.weight]
    print "-----biases-----:"
    print [item.b for item in nn_obj.weight]
Example #12
    mb_size = minibatch_size
    for i in range(0, epoch):
        prg.show_progressxxx(i + 1, epoch)

        # draw a random mini-batch of the four XOR patterns (mb_size <= 4);
        # this also avoids shadowing the epoch counter i with a second inner loop
        batch = random.sample(training_data, mb_size)
        x = [item[0] for item in batch]
        d = [item[1] for item in batch]

        nn_obj.batch_learn(x, d, mb_size)
    prg.end_progress()

    dp.obj_dump(nn_obj, "./learn-fitting-batch.pkl")

    print "@@@@@TEST@@@@@"
    # print the network output for each of the four XOR patterns
    for x, d in training_data:
        nn_obj.feedforward(x, d)
        print nn_obj.node[-1].z

    print "@@@@@END@@@@@"
    print "-----weights-----:"
    print [item.w for item in nn_obj.weight]
    print "-----biases-----:"
    print [item.b for item in nn_obj.weight]