Example #1
0
def load_mnist(dataset="train", digits=np.arange(10), path=".", size=60000):
    """Load `size` examples of each digit in `digits` from MNIST, shuffled.

    Parameters
    ----------
    dataset : str
        Either 'train' or 'test'; selects which split to draw from.
    digits : sequence of int
        Digit classes to include (default 0-9).
    path : str
        Directory containing 'mnist.pkl.gz'.
    size : int
        Number of examples to take per digit.

    Returns
    -------
    images : (N, rows, cols) float32 array
    labels : (N,) int8 array
        N == size * len(digits); both shuffled with the same permutation.

    Raises
    ------
    ValueError
        If `dataset` is not 'train'/'test', or a digit has fewer than
        `size` examples available (the original code raised an opaque
        IndexError in that case).
    """
    train_set, test_set = data.load_mnist(f'{path}/mnist.pkl.gz')
    if dataset == "train":
        img, lbl = train_set
    elif dataset == "test":
        img, lbl = test_set
    else:
        raise ValueError("dataset must be 'test' or 'train'")

    # Images arrive flattened; frames are assumed square (28x28 for MNIST).
    num, n_pixels = img.shape
    n_dim = int(math.sqrt(n_pixels))
    rows, cols = n_dim, n_dim

    N = size * len(digits)
    images = np.zeros((N, rows, cols), dtype='float32')
    labels = np.zeros(N, dtype='int8')

    for i, label in enumerate(digits):
        # Vectorized index selection replaces the original per-digit
        # Python scan over all of `lbl`.
        ind = np.where(lbl == label)[0][:size]
        if len(ind) < size:
            raise ValueError(
                f"only {len(ind)} examples of digit {label}; {size} requested")
        images[i * size:(i + 1) * size] = img[ind].reshape((-1, rows, cols))
        labels[i * size:(i + 1) * size] = lbl[ind]

    # Shuffle images and labels with the same permutation.
    rand = np.random.permutation(N)
    return images[rand], labels[rand]
Example #2
0

def one_hot(labels, c=None):
    """Convert an integer label vector to a one-hot matrix.

    Parameters
    ----------
    labels : (n,) int array
        Class indices; each must lie in [0, c).
    c : int, optional
        Number of classes. Defaults to the number of distinct labels,
        which is only correct when labels are contiguous starting at 0.

    Returns
    -------
    (n, c) float array with exactly one 1 per row.
    """
    if labels.ndim != 1:
        # Explicit error instead of `assert`, which is stripped under -O.
        raise ValueError(f"labels must be 1-D, got ndim={labels.ndim}")
    n = labels.shape[0]
    c = len(np.unique(labels)) if c is None else c
    y = np.zeros((n, c))
    y[np.arange(n), labels] = 1
    return y


rng = np.random.RandomState(9)  # fixed seed for reproducibility


# --- load the data
# NOTE(review): assumes load_mnist returns ((X_train, y_train), (X_test, y_test))
# with pixel values in [0, 1] — confirm against the loader in scope here.
(X_train, y_train), (X_test, y_test) = load_mnist()

X_train = 2 * X_train - 1  # normalize to -1 to 1
X_test = 2 * X_test - 1  # normalize to -1 to 1

# One-hot targets for a 10-class classifier.
train_targets = one_hot(y_train, 10)
test_targets = one_hot(y_test, 10)

# --- set up network parameters
n_vis = X_train.shape[1]  # input dimensionality (flattened pixels)
n_out = train_targets.shape[1]  # number of output classes
# n_hid = 300
n_hid = 1000  # hidden-layer size; alternative sizes kept for experiments
# n_hid = 3000

# encoders = rng.normal(size=(n_hid, 11, 11))
Example #3
0
from hunse_thesis.neurons import static_f_df, linear, dlinear, relu, drelu
from hunse_thesis.offline_learning import (Network, BPLearner, FALearner,
                                           FALocalLearner, FASkipLearner,
                                           make_flat_batch_fn,
                                           make_random_batch_fn)
from hunse_thesis.offline_learning import (nll_cost_on_inds,
                                           class_error_on_inds)

from hunse_thesis.utils import orthogonalize, initial_weights, initial_w

# Seaborn styling; note the second call overrides the first's style with 'ticks'.
sns.set_style('white')
sns.set(context='paper', style='ticks', palette='dark')

# --- problem dataset
(trainX, trainY), (testX, testY) = load_mnist('~/data/mnist.pkl.gz')
labels = np.unique(trainY)  # distinct class labels in the training set
n_labels = len(labels)

# Smaller subsets, kept for quick experiment turnaround:
# trainX, trainY = trainX[:100], trainY[:100]  # quick training set
# trainX, trainY = trainX[:1000], trainY[:1000]  # quick training set
# trainX, trainY = trainX[:10000], trainY[:10000]  # quick training set


def preprocess(images):
    """Rescale pixel values from [0, 1] to [-1, 1], mutating `images` in place."""
    np.multiply(images, 2, out=images)
    np.subtract(images, 1, out=images)


preprocess(trainX), preprocess(testX)  # in-place; the tuple result is discarded
Example #4
0
from src.Neuron.LIF import LIF
from src.Log.DataLog import DataLog
from nengo_extras.data import load_mnist
from src.LearningRule.simplified_stdp import STDP
from src.Input.InputData import PresentInputWithPause
from src.Log.Heatmap import HeatMapSave, AllHeatMapSave

#############################
# load the data
#############################

img_rows, img_cols = 28, 28  # MNIST frame size
input_nbr = 1000  # number of training examples to keep
Dataset = "Mnist"
(image_train, label_train), (image_test,
                             label_test) = load_mnist("mnist.pkl.gz")

# Keep the first `input_nbr` examples; slicing replaces the original
# element-by-element append loop (same values, same array result).
image_train_filtered = np.array(image_train[:input_nbr])
label_train_filtered = np.array(label_train[:input_nbr])

print("actual input", len(label_train_filtered))

#############################
Example #5
0
#############################
# load the data
#############################

# Network specification is required as the first CLI argument.
try:
    NetworkInfo = sys.argv[1]
except IndexError:
    # Missing argument: report and exit with a FAILURE status. The
    # original used a broad `except Exception` and exit(0), which hid
    # the error from shells/CI.
    print("Make sure to pass the network arguments !")
    sys.exit(1)


input_nbr = 10000  # number of test examples to keep
classes = 10

(image_train, label_train), (image_test, label_test) = load_mnist("mnist.pkl.gz")

# Keep the first `input_nbr` test examples; slicing replaces the
# original element-by-element append loop.
image_test_filtered = np.array(image_test[:input_nbr])
label_test_filtered = np.array(label_test[:input_nbr])

#############################
model = nengo.Network("My network")
#############################
Example #6
0
    Encoder, ShallowNetwork, FASkipNetwork, FATwoStepNetwork)
from hunse_thesis.utils import initial_weights

from plot_online_mnist import show_all_plots, print_test_errors

def eye_encoders(d):
    """Build tiled identity encoders: the +/- unit axis vectors in d dimensions."""
    basis = np.eye(d)
    return Tile(np.concatenate((basis, -basis), axis=0))

# Silence a known-noisy learning-rate warning from the training code.
warnings.filterwarnings('ignore', message='This learning rate is very high')

# Fixed seed for reproducibility; alternative seeds kept for reference.
# rng = np.random.RandomState(9)
rng = np.random.RandomState(8)
# rng = np.random

# --- data
mnist = load_mnist('~/data/mnist.pkl.gz')
(Xtrain, Ytrain), (Xtest, Ytest) = mnist

labels = np.unique(Ytrain)  # distinct class labels in the training set
n_labels = len(labels)

def preprocess(images):
    """Shift pixel values from [0, 1] to [-1, 1]; mutates `images` in place."""
    images[:] = images * 2 - 1

preprocess(Xtrain), preprocess(Xtest)  # in-place; the tuple result is discarded
# One-hot target matrices for training and testing.
Ttrain = one_hot_from_labels(Ytrain, classes=n_labels)
Ttest = one_hot_from_labels(Ytest, classes=n_labels)

# --- params
# dhids = [600, 300]
Example #7
0
from nengo_extras.data import load_mnist, one_hot_from_labels
from nengo_extras.vision import Gabor, Mask, ciw_encoders
# from nengo_extras.vision import Gabor, Mask, ciw_encoders, cd_encoders_biases

import hunse_thesis.solvers
# from hunse_thesis.vision import (
#     percentile_encoders_intercepts, scale_encoders_intercepts)

rng = np.random.RandomState(1)  # fixed seed for reproducibility

# --- load data
s_in = (28, 28)  # MNIST image shape
n_in = np.prod(s_in)  # flattened input size (784)
n_out = 10  # number of digit classes

train, test = load_mnist('~/data/mnist.pkl.gz')
# Smaller training subsets, kept for quick experiments:
# train = (train[0][:1000], train[1][:1000])
# train = (train[0][:10000], train[1][:10000])

train_images, train_labels = train
test_images, test_labels = test
for images in [train_images, test_images]:
    images[:] = 2 * images - 1  # normalize to -1 to 1 (in place)

train_targets = one_hot_from_labels(train_labels, classes=10)
test_targets = one_hot_from_labels(test_labels, classes=10)

# Sanity checks on the flattened data layout.
assert train_images.shape[1] == n_in
assert train_targets.shape[1] == n_out

# --- network
Example #8
0
def test_load_mnist(plt):
    """Smoke test: load MNIST and tile the training images on the current axes."""
    (train_images, _), _ = load_mnist()
    tile(train_images.reshape((-1, 28, 28)), ax=plt.gca())
Example #9
0
from src.Neuron.LIF import LIF

#############################
# load the data
#############################

np.random.seed(0)  # fixed global seed for reproducibility

# Network specification from the command line; unlike the guarded variant
# elsewhere in this file, a missing argument raises IndexError here.
NetworkInfo = sys.argv[1]

input_nbr = 10000  # number of test examples to keep
class_input_nbr = 1000  # NOTE(review): usage not visible in this chunk — confirm

classes = 10

(image_train, label_train), (image_test, label_test) = load_mnist()

image_test_filtered = []
label_test_filtered = []

# Accumulators for class-assignment examples, filled further below.
class_image_filtered = []
class_label_filtered = []

# Copy the first `input_nbr` test examples.
for i in range(0,input_nbr):
        image_test_filtered.append(image_test[i])
        label_test_filtered.append(label_test[i])

image_test_filtered = np.array(image_test_filtered)
label_test_filtered = np.array(label_test_filtered)

for i in range(0,10):
Example #10
0
def test_load_mnist(plt):
    """Smoke test: load MNIST and tile the training images."""
    trainX = load_mnist()[0][0]
    tile(trainX.reshape((-1, 28, 28)))