# Example no. 1
import mnist_loader
import network

# Load the MNIST training, validation and test splits.
# BUG FIX: the loader is named load_data_wrapper; the original called the
# misspelled load_data_wapper, which raises AttributeError at import time.
training_data, validation_data, test_data = mnist_loader.load_data_wrapper()
print(type(training_data))

# Train with stochastic gradient descent:
# 784 inputs (28x28 pixels), 30 hidden neurons, 10 output classes;
# 30 epochs, mini-batch size 20, learning rate 3.0, evaluating on test_data.
network = network.Network([784, 30, 10])
network.SGD(training_data, 30, 20, 3.0, test_data=test_data)
# Example no. 2
# Second part of the processing unit: the neural-network section.
# The i_vectors and o_vectors produced by the file_parser section are paired
# up into the training set for the network. The training set is a 2-column
# numpy object array shaped like:
# [[[input array 1] [output array 1]]
#  [[input array 2] [output array 2]]
#  [[input array 3] [output array 3]]
#  ...]
# The final pair does not fit (the array holds len(i_vectors) - 1 rows); its
# input column vector is kept aside as test_data instead.
training_data = numpy.empty([int(len(i_vectors) - 1), 2], dtype=numpy.ndarray)
for idx, (in_vec, out_vec) in enumerate(zip(i_vectors, o_vectors)):
    # Promote each 1-D vector to a column vector (shape (n, 1)).
    in_col = in_vec[numpy.newaxis].transpose()
    out_col = out_vec[numpy.newaxis].transpose()
    if idx < len(training_data):
        training_data[idx, 0] = in_col
        training_data[idx, 1] = out_col
    else:
        test_data = in_col

# Instantiate the neural network (Network1) and run its training method.
# eta is the learning rate; layers_sizes lists the neuron count per layer,
# with the first and last entries being the input and output vector sizes.
eta = 1  # Learning Rate
layers_sizes = [len(optypes), 200, len(atomtypes) * 2
                ]  # Layer structure: Input Layer, Hidden Layer, Output Layer
network = Network1(layers_sizes)
network.SGD(training_data, 10, 8, eta, test_data)
# Example no. 3
# -*- coding: utf-8 -*-
"""Train a [784, 30, 10] network on MNIST and report test-set accuracy.

Loads the MNIST splits, trains with SGD for 30 epochs (mini-batch size 10,
learning rate 3.0) against the validation set, times the run, then evaluates
on the held-out test set.
"""

import time

import mnist_loader
import network

(training_data, validation_data, test_data) = mnist_loader.load_data_wrapper()
# BUG FIX: the original mixed Python 2 print statements with a print() call,
# so `print("Training Time used:", elapsed)` printed a tuple under Python 2.
# All prints are now consistent Python 3 function calls.
print("Load data finished.")

network = network.Network([784, 30, 10])
print("Network initialize completed.")

# time.perf_counter() replaces time.clock(), which was deprecated in 3.3
# and removed in Python 3.8.
start = time.perf_counter()
network.SGD(training_data,
            epochs=30,
            mini_batch_size=10,
            eta=3.0,
            test_data=validation_data)
elapsed = time.perf_counter() - start
print("Training Time used:", elapsed)

print("Evaluate: {0} / {1}".format(network.evaluate(test_data), len(test_data)))
# Example no. 4
    ])

    # Model structure: fully connected stack 784 -> 200 -> 100 -> 10 -> 10
    # with ReLU activations, ending in SoftMax and a CrossEntropy loss layer.
    # NOTE(review): 784 inputs / 10 outputs suggest MNIST digits — confirm.
    layers = [
        net.Linear(in_kernel=784, out_kernel=200, bias=True),
        net.Linear(200, 100, bias=True),
        net.ReLU(),
        net.Linear(100, 10, bias=True),
        net.ReLU(),
        net.Linear(10, 10, bias=True),
        net.SoftMax(),
        net.CrossEntropy()
    ]

    # Plain stochastic gradient descent; learning_rate is defined outside
    # this view.
    optimizer = net.SGD(learning_rate=learning_rate)

    # Assemble the network from the layer stack and the optimizer.
    network = net.Network(layers=layers, optimizer=optimizer)

    # Train and time the run; x, y, batch_size and epochs come from outside
    # this view.
    t0 = time.time()
    print("Starting training process...")
    history = network.train(x, y, batch_size, epochs, shuffle=True)

    # Evaluate on the held-out test set (x_test, y_test defined elsewhere).
    print("Starting evaluation process...")
    evaluation = network.evaluate(x_test, y_test, batch_size)
    print("Total time elapsed: {} minutes".format((time.time() - t0) / 60))

    # Plotting training loss (code continues past this view).