def run_networks():
    """Train networks using three different values for the learning rate,
    and store the cost curves in the file ``multiple_eta.json``, where they
    can later be used by ``make_plot``.
    """
    # Seed both RNGs so repeated runs produce comparable cost curves.
    random.seed(12345678)
    np.random.seed(12345678)
    training_data, validation_data, test_data = mnist_loader.load_data_wrapper()
    results = []
    for eta in LEARNING_RATES:
        print("\nTrain a network using eta = " + str(eta))
        # Fresh network per learning rate so runs don't share weights.
        net = network1.Network([784, 30, 10])
        results.append(
            net.SGD(training_data, NUM_EPOCHS, 10, eta, lmbda=5.0,
                    evaluation_data=validation_data,
                    monitor_training_cost=True))
    # ``with`` guarantees the file is closed even if json.dump raises
    # (the original open/close pair leaked the handle on error).
    with open("multiple_eta.json", "w") as f:
        json.dump(results, f)
def run_networks():
    """Train a network on progressively larger slices of the training
    data and record the validation accuracy for each slice size in
    ``more_data.json``.
    """
    # Seed both RNGs so repeated runs produce comparable accuracies.
    random.seed(12345678)
    np.random.seed(12345678)
    training_data, validation_data, test_data = mnist_loader.load_data_wrapper()
    net = network1.Network([784, 30, 10], cost=network1.CrossEntropyCost())
    accuracies = []
    for size in SIZES:
        print("\n\nTraining network with data set size %s" % size)
        # Re-initialize weights so each data-set size starts fresh.
        net.large_weight_initializer()
        # Scale epochs inversely with size to keep the total number of
        # gradient steps roughly constant across runs.
        num_epochs = 1500000 // size
        net.SGD(training_data[:size], num_epochs, 10, 0.5, lmbda=size * 0.0001)
        # Convert the raw correct-count to a percentage; assumes a
        # 10,000-image validation set -- TODO confirm against loader.
        accuracy = net.accuracy(validation_data) / 100.0
        print("Accuracy was %s percent" % accuracy)
        accuracies.append(accuracy)
    # ``with`` guarantees the file is closed even if json.dump raises
    # (the original open/close pair leaked the handle on error).
    with open("more_data.json", "w") as f:
        json.dump(accuracies, f)
def run_network(filename, num_epochs, training_set_size=1000, lmbda=0.0):
    """Train the network for ``num_epochs`` on ``training_set_size``
    images, and store the results in ``filename``.  Those results can
    later be used by ``make_plots``.  Note that the results are stored
    to disk in large part because it's convenient not to have to
    ``run_network`` each time we want to make a plot (it's slow).
    """
    # Seed both RNGs so repeated runs produce comparable results.
    random.seed(12345678)
    np.random.seed(12345678)
    training_data, validation_data, test_data = mnist_loader.load_data_wrapper()
    net = network1.Network([784, 30, 10], cost=network1.CrossEntropyCost())
    net.large_weight_initializer()
    test_cost, test_accuracy, training_cost, training_accuracy \
        = net.SGD(training_data[:training_set_size], num_epochs, 10, 0.5,
                  evaluation_data=test_data, lmbda=lmbda,
                  monitor_evaluation_cost=True,
                  monitor_evaluation_accuracy=True,
                  monitor_training_cost=True,
                  monitor_training_accuracy=True)
    # ``with`` guarantees the file is closed even if json.dump raises
    # (the original open/close pair leaked the handle on error).
    with open(filename, "w") as f:
        json.dump([test_cost, test_accuracy, training_cost, training_accuracy],
                  f)
""" run_script.py ------------- The module will let the users run our neural network. The program will return the accuracy after each epochs """ import mnist_loader ## importing the mnist_loader module ## used for loading the mnist dataset import network1 ## network1 is the module that implements the neural network concepts training_data, validation_data, test_data = mnist_loader.load_data_wrapper() ## using the load_data_wrapper() to prepare the dataset in the required format net = network1.Network([784, 30, 10]) ## constructing the neural network ## having 784 i/p neurons, ## 30 neurons in the hidden layer(n/w has only one hidden layer) ## and 10 o/p neurons representing 0...9 digits net.SGD(training_data, test_data, epochs=30, mini_batch_size=10, eta=3.0) ## calling SGD() function to perform the mini-batch gradient descent ## the function will print the accuracy of each epochs ## slothfulwave612...
# Train a network on raw sound input rather than MNIST images.
import sound_input_loader
import network1

# Load the (training, validation, test) splits from the sound loader.
training_data, validation_data, test_data = sound_input_loader.input_data_wrapper()
# Materialize the training data -- presumably the loader returns a
# generator that SGD needs to index/reuse; verify against the loader.
training_data = list(training_data)

# 441000 input neurons -- presumably 10 s of 44.1 kHz audio samples
# (TODO confirm); one hidden layer of 30 neurons; a single output neuron.
net = network1.Network([441000, 30, 1])
# 30 epochs, mini-batch size 5, learning rate 3.0; test_data enables
# per-epoch evaluation.
net.SGD(training_data, 30, 5, 3.0, test_data=test_data)