def test_one_hot_from_labels_int(rng):
    nc = 19
    labels = rng.randint(nc, size=1000)
    yref = np.zeros((len(labels), nc))
    yref[np.arange(len(labels)), labels] = 1

    y0 = one_hot_from_labels(labels)
    y1 = one_hot_from_labels(labels, classes=nc + 5)
    assert np.array_equal(y0, yref)
    assert np.array_equal(y0, y1[:, :nc])
    assert (y1[:, nc:] == 0).all()
def test_one_hot_from_labels_skip(rng):
    labels = 2 * rng.randint(4, size=1000)
    yref = np.zeros((len(labels), labels.max() + 1))
    yref[np.arange(len(labels)), labels] = 1

    y = one_hot_from_labels(labels)
    assert np.array_equal(y, yref)
def test_one_hot_from_labels_float(rng):
    classes = rng.uniform(0, 9, size=11)
    inds = rng.randint(len(classes), size=1000)
    labels = classes[inds]
    yref = np.zeros((len(labels), len(classes)))
    yref[np.arange(len(labels)), inds] = 1

    y = one_hot_from_labels(labels, classes=classes)
    assert np.array_equal(y, yref)
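# Hypothetical reference sketch of `one_hot_from_labels`, reconstructed only from
# the behaviour the tests above exercise (not the actual implementation): integer
# labels index columns directly, a scalar `classes` fixes the number of columns,
# and an array `classes` lists the label value corresponding to each column.
import numpy as np


def _one_hot_from_labels_sketch(labels, classes=None, dtype=float):
    labels = np.asarray(labels)
    if classes is None or np.isscalar(classes):
        # integer labels are used directly as column indices
        n_classes = int(labels.max()) + 1 if classes is None else int(classes)
        inds = labels.astype(int)
    else:
        # arbitrary label values: map each label to its position in `classes`
        classes = np.asarray(classes)
        n_classes = len(classes)
        inds = np.array([np.argmax(classes == label) for label in labels])

    y = np.zeros((len(labels), n_classes), dtype=dtype)
    y[np.arange(len(labels)), inds] = 1
    return y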
        tau_rc=tau_rc, amp=amp, learners=learners), fh)
    print("Saved %r" % filename)
else:
    with open(filename, 'rb') as fh:
        filedata = dill.load(fh)
    globals().update(filedata)

# --- sanity test (to check that everything saved fine)
# for learner in learners:
#     errors = learner.test(test_set)
#     print('%s: %0.3f' % (learner.name, errors.mean()))

# --- plot weighted selectivity (rows=learners, cols=layers, traces=neurons)
testT = one_hot_from_labels(testY, classes=10).astype(float)
testW = testT / testT.sum(0)
test_outs = [learner.network.forward(testX)[1] for learner in learners]

# fig_sel = plt.figure(figsize=(6.4, 3.5))
# rows = len(learners)
# cols = learners[0].network.n_layers
# for i, outs in enumerate(test_outs):
#     # outs = outs[1:-1]
#     outs = outs[1:]
#     outs[-1][:] = softmax(outs[-1], axis=1)
#     for j, h in enumerate(outs):
#         h = h / (h.sum(0) + 1e-16)
#         R = np.dot(h.T, testT)  # responses: neurons x classes
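# Hedged sketch of how `testW` could feed the selectivity plot above (an
# assumption, since the plotting body is commented out): each column of `testW`
# sums to one, so `h.T.dot(testW)` gives each neuron's class-averaged response,
# from which one common selectivity index (best class vs. the rest) follows.
def _class_selectivity_sketch(h, testW):
    R = np.dot(h.T, testW)                            # neurons x classes
    best = R.max(axis=1)                              # strongest class response
    rest = (R.sum(axis=1) - best) / (R.shape[1] - 1)  # mean of remaining classes
    return (best - rest) / (best + rest + 1e-16)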
# n_hid = 6000
# n_hid = 12000
# n_hid = 15000
# n_hid = 21000

# --- load the data
print("Loading data (augment=%r)" % (augment,))
train, test = load_cifar10('~/data/cifar-10-python.tar.gz')
train_images, train_labels = train
test_images, test_labels = test
train_images = (1. / 128) * train_images - 1
test_images = (1. / 128) * test_images - 1
del train, test

train_targets = one_hot_from_labels(train_labels, classes=10)
test_targets = one_hot_from_labels(test_labels, classes=10)

assert train_images.ndim == test_images.ndim == 2
n_vis = train_images.shape[1]
n_out = train_targets.shape[1]

shape = (3, 32, 32)
c = shape[0]
assert np.prod(shape) == n_vis

per_batch = 10000
if not augment:
    def batches():
        return itertools.izip(train_images.reshape(-1, per_batch, n_vis),
                              train_targets.reshape(-1, per_batch, n_out))
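# Usage sketch (an assumption about the surrounding script, not code from it):
# in the non-augmented case, `batches()` yields (images, targets) pairs of
# `per_batch` rows each, with images already rescaled and targets one-hot
# encoded. The helper below is only meaningful when `augment` is False and is
# never called here.
def _check_batches_sketch():
    for batch_images, batch_targets in batches():
        assert batch_images.shape == (per_batch, n_vis)
        assert batch_targets.shape == (per_batch, n_out)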
rng = np.random.RandomState(8)
# rng = np.random

# --- data
mnist = load_mnist('~/data/mnist.pkl.gz')
(Xtrain, Ytrain), (Xtest, Ytest) = mnist

labels = np.unique(Ytrain)
n_labels = len(labels)


def preprocess(images):
    images[:] *= 2
    images[:] -= 1

preprocess(Xtrain), preprocess(Xtest)

Ttrain = one_hot_from_labels(Ytrain, classes=n_labels)
Ttest = one_hot_from_labels(Ytest, classes=n_labels)

# --- params
# dhids = [600, 300]
dhids = [500, 500]

if 1:
    # epochs = 0.1
    # epochs = 0.5
    # epochs = 1.5
    # epochs = 3
    epochs = 6
    # epochs = 10
    # epochs = 15
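# Illustrative sanity checks, not part of the original script: assuming the
# loader returns pixels in [0, 1], `preprocess` maps them in place to [-1, 1],
# and the one-hot targets should round-trip back to the integer labels.
assert Xtrain.min() >= -1 and Xtrain.max() <= 1
assert Ttrain.shape == (len(Ytrain), n_labels)
assert np.array_equal(Ttrain.argmax(axis=1), Ytrain)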
# --- load data
s_in = (28, 28)
n_in = np.prod(s_in)
n_out = 10

train, test = load_mnist('~/data/mnist.pkl.gz')
# train = (train[0][:1000], train[1][:1000])
# train = (train[0][:10000], train[1][:10000])
trainX, trainY = train
testX, testY = test

for images in [trainX, testX]:
    images[:] = 2 * images - 1  # normalize to -1 to 1

trainT = one_hot_from_labels(trainY, classes=10)
testT = one_hot_from_labels(testY, classes=10)

assert trainX.shape[1] == n_in
assert trainT.shape[1] == n_out

# --- NEF network
# filename = None
# filename = 'combined_offline_mnist_nef.dil'
# filename = 'combined_offline_mnist_test.dil'
# filename = 'combined_offline_mnist_sgd25.dil'
# filename = 'combined_offline_mnist_sgd25vanilla.dil'
# filename = 'combined_offline_mnist_sgd50vanilla.dil'
# filename = 'combined_offline_mnist_sgd50vanillareset.dil'
filename = 'combined_offline_mnist_sgd50nesterov.dil'
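# Hedged sketch of the save side of the caching pattern (an assumption that
# mirrors the dill load/save branch shown earlier in this section): when
# `filename` does not exist yet, trained results would be pickled so that later
# runs can skip the expensive training step. `results` is a hypothetical dict.
import os
import dill


def _save_results_sketch(filename, results):
    if filename is not None and not os.path.exists(filename):
        with open(filename, 'wb') as fh:
            dill.dump(results, fh)
        print("Saved %r" % filename)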