Example #1
from lib.data import mnist
from lib.fnn.shallowNet import ShallowNet
from lib.fnn.performance import trainTestNetwork
import lib.nonlinFunctions as nonlinFunctions

# Load data
dataMNIST = mnist.load("/home/aleksejs/Downloads/mnist_data/")

# Set parameters
param = {
    # Network parameters
    'netClass': ShallowNet,
    'ny': 10,  # Number of possible outputs (for binarization)
    'nHid': [],  # no hidden layers
    'nonlinFunc': nonlinFunctions.func_relu,
    'nonlinPrimFunc': nonlinFunctions.fprim_relu,
    'bSTD': 10**-4,  # standard deviation of initial thresholds
    'wSTD': 10**-4,  # standard deviation of initial weights

    # Training parameters
    'etaPref': 10**-8,  # Prefactor for the learning rate
    'nEpoch': 100,  # Number of passes over the entire dataset
    'nMini': 32,  # Number of datapoints per minibatch
}

trainTestNetwork(*dataMNIST.values(), param)
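
# For reference, func_relu / fprim_relu used in the parameters above are
# expected to behave like the standard ReLU and its derivative. The stand-ins
# below are only an illustrative sketch, not the library's implementation:
import numpy as np

def relu_sketch(x):
    # element-wise max(x, 0)
    return np.maximum(x, 0.0)

def relu_prime_sketch(x):
    # derivative of ReLU: 1 where x > 0, else 0
    return (x > 0.0).astype(float)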
Example #2
import numpy as np
import matplotlib.pyplot as plt
from lib.data import mnist
from lib.autoenc.rbdeep import RaoBallardDeep
import lib.nonlinFunctions as nonlinFunctions

# Load data
dataMNIST = mnist.load("/home/alyosha/Downloads/mnist_data/")
nData = 5
dataIdxs = np.arange(nData)
xarr = dataMNIST['train_images'][dataIdxs].astype(float)
nPix = xarr.shape[1] * xarr.shape[2]
for i in range(nData):
    xarr[i] /= np.linalg.norm(xarr[i])

sig = nonlinFunctions.func_id
sigp = nonlinFunctions.fprim_id

# Specify parameters
param = {
    'nNode': [nPix, 5],
    'dt': 0.1,  # ms,  timestep
    'tauX': 1.0,  # ms,  neuronal timescale
    'tauU': 100.0,  # ms,  plasticity timescale
    'inputNoise': 0.0,
    'uNorm': 1.0,
    'nonlinFunc': [sig, sig],
    'nonlinPrimFunc': [sigp, sigp]
}

# Run
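# The original listing ends here and the RaoBallardDeep interface is not shown,
# so its construction/run call is omitted. For orientation only, a single Euler
# step of the classic Rao-Ballard predictive-coding dynamics described by these
# parameters would look roughly as follows (illustrative sketch, not the
# library code; with func_id the nonlinearity is the identity):
U = np.random.normal(0.0, 1e-3, (param['nNode'][0], param['nNode'][1]))  # generative weights
r = np.zeros(param['nNode'][1])   # latent representation
x = xarr[0].reshape(nPix)         # one flattened, normalized input

pred = U @ r                      # top-down prediction of the input
err = x - pred                    # prediction error at the input layer
r = r + (param['dt'] / param['tauX']) * (U.T @ err)       # fast inference update (timescale tauX)
U = U + (param['dt'] / param['tauU']) * np.outer(err, r)  # slow plasticity update (timescale tauU)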
Example #3
from lib.data import mnist

mnistData = mnist.load("/home/aleksejs/Downloads/mnist_data/")

select_images = [0, 1, 15, 138, 2000]

mnist.plot(mnistData["train_images"], mnistData["train_labels"], select_images)
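
# mnist.plot is a helper from this repository. If a plain matplotlib view is
# preferred, the same selection can be shown directly (assuming the loaded
# images are 2-D arrays and the labels are integers, as in Example #2):
import matplotlib.pyplot as plt

fig, axes = plt.subplots(1, len(select_images), figsize=(2 * len(select_images), 2))
for ax, idx in zip(axes, select_images):
    ax.imshow(mnistData["train_images"][idx], cmap="gray")   # show the digit
    ax.set_title(str(mnistData["train_labels"][idx]))        # label as title
    ax.axis("off")
plt.show()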