Пример #1
0
# Grid search over hidden-layer sizes and learning rates for the Mnist model.
hidden_nodes_list = [400]  # default 100
learning_rate_list = [0.05]

# Best-so-far trackers; the code that updates them is further down the script.
best_performance = 0
best_learning_rate = 0
best_hidden_nodes = 0
best_nr_of_epochs = 0

timestamp = time.time()

for h_nodes in hidden_nodes_list:
    print("hidden nodes: ", h_nodes)

    for lr in learning_rate_list:
        print("  learning rate: ", lr)
        mnist = Mnist(h_nodes, lr)

        performance_list = []

        # NOTE(review): `epochs` must be defined earlier in the file.
        for e in range(epochs):
            print("    train epoch: ", e + 1)

            # train the neural network
            train_start_time = time.time()
            mnist.train()
            # time.time() differences are in seconds, not milliseconds;
            # the original label "ms" was wrong.
            print("    training took:", time.time() - train_start_time, "seconds")

            # test the neural network
            test_start_time = time.time()
            performance = mnist.test()
            print("    test took:", time.time() - test_start_time, "seconds")
Since Hopfield networks are not supervised models, we must
turn classification into a memory recall task. To do this,
we feed the network augmented vectors containing both the
image and a one-hot vector representing the class.
"""
# Import libraries
import numpy as np
import os
import sys
import urllib
import time
import tensorflow as tf
import FileProcess as fipr

from Mnist import Mnist
mnist = Mnist()

from update import hebbian_update
from update import extended_storkey_update
from network import Network
# from tensorflow.examples.tutorials.mnist import input_data

BATCH_SIZE = 100

# Start counting time.
# time.clock() was removed in Python 3.8; time.perf_counter() is the
# documented replacement for measuring elapsed wall-clock intervals.
start_time = time.perf_counter()

# open and load csv files (fipr.load_csv is project-local; presumably the
# second argument toggles header/label handling — verify in FileProcess)
time_load_start = time.perf_counter()
X_train, y_train = fipr.load_csv("train_file.csv", True)
X_test, y_test = fipr.load_csv("test_file.csv", True)
Пример #3
0
from Mnist import Mnist
from Network import Network


# Load the MNIST IDX files and train a 784-30-10 network with SGD.
dataset = Mnist('train-images-idx3-ubyte', 'train-labels-idx1-ubyte',
                't10k-images-idx3-ubyte', 't10k-labels-idx1-ubyte')
model = Network([784, 30, 10])
# 30 epochs, mini-batch size 10, learning rate 3.0
model.SGD(dataset.train_data, 30, 10, 3.0, test_data=dataset.test_data)
### Build CNN for Data with New Patterns ###

# Import libraries
import numpy as np
import os
import sys
import urllib
import time
import tensorflow as tf
import FileProcess as fipr

from Mnist import Mnist
mnist = Mnist()

sess = tf.InteractiveSession()

# Start counting time.
# time.clock() was removed in Python 3.8; time.perf_counter() is the
# documented replacement for measuring elapsed wall-clock intervals.
start_time = time.perf_counter()

# open and load csv files
time_load_start = time.perf_counter()
X_train, y_train = fipr.load_csv("train_file.csv", True)
X_test, y_test = fipr.load_csv("test_file.csv", True)
#y_train = y_train.flatten()
#y_test = y_test.flatten()
time_load_end = time.perf_counter()
print("Loading finished, loading time: %g seconds" %
      (time_load_end - time_load_start))

X_test_even, y_test_even = fipr.load_csv("test_file_even.csv", True)
Пример #5
0
from Mnist import Mnist
from NN import NN
import numpy as np

# Train a simple NN on the first 20k MNIST samples, then report accuracy
# on the following 1000-sample slice.
dataset = Mnist("train-images.idx3-ubyte", "train-labels.idx1-ubyte")

train_range = (0, 20000)
# NOTE(review): index 20000 falls in neither range — confirm this gap is intended.
val_range = (20001, 21001)

X_train = dataset.get_images(train_range)
y_train = dataset.get_labels(train_range)

X_val = dataset.get_images(val_range)
y_val = dataset.get_labels(val_range)

network = NN()
network.train(X_train, y_train, iterations=10000, batch_size=1000)

# Fraction of validation samples predicted correctly.
print(np.mean(network.predict(X_val) == y_val))
Пример #6
0
### Build CNN for Experiments ###

# Import libraries
import numpy as np
import os
import sys
import urllib
import time
import tensorflow as tf
sess = tf.InteractiveSession()

from Mnist import Mnist
mnist = Mnist()
training_data, training_labels = mnist.load_training_batch()

# Uniformed Distribution of Data
training_data, training_labels = mnist.balance_data(training_data,
                                                    training_labels)
print(type(training_labels))
print(type(training_labels[0, 0]))
print(training_labels.shape)
mnist.print_sample_distribution(training_labels)

training_labels = mnist.make_one_hot(training_labels)

#print("a sample of training label:" % training_labels)

test_data, test_labels = mnist.load_test_batch()
test_data = np.reshape(test_data, (-1, 784))
test_labels = mnist.make_one_hot(test_labels)
Пример #7
0
def start():
    """Load the MNIST IDX files, build a 784-30-10 network, and run SGD."""
    dataset = Mnist('train-images-idx3-ubyte', 'train-labels-idx1-ubyte',
                    't10k-images-idx3-ubyte', 't10k-labels-idx1-ubyte')
    model = Network([784, 30, 10])
    # 30 epochs, mini-batch size 10, learning rate 3.0
    model.SGD(dataset.train_data, 30, 10, 3.0, test_data=dataset.test_data)
Пример #8
0
# fileName: mnist
# author: xiaofu.qin
# create at 2017/12/17
# description:

import keras
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.optimizers import SGD
from keras.losses import categorical_crossentropy
from Mnist import Mnist

# Read the IDX-format MNIST files and one-hot encode the labels.
train_images = Mnist.extract_images("../mnist-data/train-images.idx3-ubyte")
train_labels = keras.utils.to_categorical(
    Mnist.extract_labels("../mnist-data/train-labels.idx1-ubyte"),
    num_classes=10)

test_images = Mnist.extract_images("../mnist-data/t10k-images.idx3-ubyte")
test_labels = keras.utils.to_categorical(
    Mnist.extract_labels("../mnist-data/t10k-labels.idx1-ubyte"),
    num_classes=10)

print("extracted images and labels")

# Define a small fully connected classifier: 784 -> 100 -> 500 -> 10 softmax.
# Passing the layer list to Sequential() is equivalent to repeated .add() calls.
model = Sequential([
    Dense(100, activation="relu", input_dim=784),
    Dense(500, activation="relu"),
    Dense(10, activation="softmax"),
])

# sgd = SGD(lr=0.02, momentum=0.95)