Example #1
win2 = 500 #Window for mov avg 2


######################## Preprocess data - dictionary gets loaded with first num_rfields MNIST images ############################
#################################### Training data starts loading after dictionary images ########################################
dict_data = mnist.load_images(image_file, num_rfields)
training_data = mnist.load_images(image_file, num_images, num_rfields)
dict_data = [np.array(i, dtype=float) / 255 for i in dict_data]
training_data = [np.array(i, dtype=float) / 255 for i in training_data]




######################################### Initialize network dictionary and parameters ###########################################
D = sp.build_dictionary(dict_data)
network = lca.r_network(D)
network.set_parameters(lamb, tau, delta, u_stop, t_type)
network.set_dim(dict_data[0].shape)
#Save out the original dictionary
network.save_dictionary(5, 10, dict1_path, line_color = 255)




#################################################### Train dictionary ############################################################
network.set_alpha(alpha)
network.load_ims(training_data)
network.train(alpha_decay, alpha_decay_rate, alpha_decay_iters)
network.plot_rmse(win1, win2)
network.plot_decay()
#Save out trained dictionary
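#A hedged sketch of the save step the comment above refers to, mirroring the
#earlier save_dictionary call; dict2_path is a hypothetical output path not
#present in the original excerpt.
network.save_dictionary(5, 10, dict2_path, line_color = 255)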
Example #2
num_images  = 5000
start_pos = 50000
image_file = 'train-images.idx3-ubyte'
label_file = 'train-labels.idx1-ubyte'
image_data = mnist.load_images(image_file, num_images, start_pos)
label_data = mnist.load_labels(label_file, num_images, start_pos)

################## Scale data down before running through network ##############################
dict_data = pd.read_csv(dict_path, header = None)
dict_data = dict_data.values / 255.
for i in range(len(image_data)):
    image_data[i] = image_data[i].astype(float)
    image_data[i] /= 255.

################### Initialize Rozell network and set parameters ################################
rozell = lca.r_network(dict_data)
rozell.set_parameters(lamb, tau, delta, u_stop, t_type)

'''
################### Initialize random matrix of weights for mapping ############################
################### sparse code to output layer, then train network ############################
weights = np.random.randn(10, 51)    #10 nodes in layer j+1; 51 inputs = 50 nodes in layer j plus a bias term
learn_rate = 0.5
error_plot = np.array([])
#pdb.set_trace()
counter = 0
for i, j in zip(image_data, label_data):
    counter += 1
    if (counter % 100 == 0):
        print ("Image #: " + str(counter))
    #Run Rozell and forwardprop
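    # A hedged sketch of how this loop body might continue, reusing the
    # set_stimulus / generate_sparse / .a API seen in Example #4; the bias
    # handling, sigmoid activation, and delta-rule update are assumptions,
    # not taken from the original.
    rozell.set_stimulus(i.T)                    # current image, transposed as in Example #4
    rozell.generate_sparse()
    a = np.append(rozell.a.flatten(), 1.0)      # 50 sparse coefficients + bias -> 51 inputs
    z = 1.0 / (1.0 + np.exp(-weights @ a))      # forward prop through the 10-node output layer
    target = np.zeros(10)
    target[j] = 1.0                             # one-hot encoding, assuming j is an integer label
    delta = (target - z) * z * (1.0 - z)        # output-layer error signal
    weights += learn_rate * np.outer(delta, a)  # delta-rule weight update
    error_plot = np.append(error_plot, np.mean(np.square(target - z)))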
Example #3
num_patches = 3000
im_dims = (8,8,3)  #Patch shape
nat_image = ic.image_class(nat_path + '/' + 'city2.jpg')

## Get patches and set Lca variables
training_data = nat_image.slice_patches()[:num_patches]
random.shuffle(training_data)
X = np.zeros((num_patches, np.prod(im_dims)))
for i in range(len(training_data)):
    X[i, :] = training_data[i].flatten()
net = Lca(num_rfields, tAlpha=0.8, tLambda=1.0)
net.init(np.prod(im_dims), num_rfields)

## Use my Lca class to save pre dictionary
before = np.array(net._crossbar.copy())
d1 = Lca_jack.r_network(np.array(net._crossbar))
d1.set_dim(im_dims)
d1.save_dictionary(5, 10, dict1_path)

## Run patches through Lca and train dictionary
MSEs = np.zeros((num_patches,))
for i in range(num_patches):
    patch = X[i, :].reshape(1, np.prod(im_dims))
    recon = net.reconstruct(patch)
    resid = recon - patch
    MSEs[i] = np.mean(np.square(resid))
    net.partial_fit(patch)
RMSEs = np.sqrt(MSEs)

## Use my Lca class to save post dictionary
after = np.array(net._crossbar.copy())
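#A hedged sketch of saving the trained (post) dictionary, mirroring the
#pre-dictionary save above; dict2_path is a hypothetical output path not
#present in the original excerpt.
d2 = Lca_jack.r_network(after)
d2.set_dim(im_dims)
d2.save_dictionary(5, 10, dict2_path)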
Example #4
########################### Select training and test sets ###############################

## If shuffle_data is set, images are chosen at random; otherwise the
## selected images are the first num_images/num_timages in each set
if shuffle_data:
    random.shuffle(training_data)
    random.shuffle(test_data)

training_set = training_data[:num_images]
test_set = test_data[:num_timages]


########################## Initialize Lca and load dictionary ############################
dict_data = pandas.read_csv(dict_path, header=None)
Lca = r_network(dict_data.values)
Lca.set_parameters(lamb, tau, delta, u_stop, t_type)
Lca.set_dim(image_data[0].shape)

# Generate sparse codes for training and testing images, then run through NN
sparse_train = np.zeros((Lca.dictionary.shape[1], len(training_set)))
sparse_test = np.zeros((Lca.dictionary.shape[1], len(test_set)))
"""
# Generate training set
for i in range(len(training_set)):
    if (i + 1) % 100 == 0:
        print ('Sparse Train#: ', i+1)
    Lca.set_stimulus(training_set[i][0].T)  #Need to remember that Rojas uses row vectors.  Transpose before passing to Rozell
    Lca.generate_sparse()
    sparse_train[:,i] = Lca.a.flatten().copy()
# Save sparse codes for later use
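# A hedged sketch of the save the comment above refers to; np.savetxt and
# the sparse_train_path name are assumptions, not part of the original.
np.savetxt(sparse_train_path, sparse_train, delimiter=',')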
Example #5
random.shuffle(training_data)



'''
for i in range(len(dict_data)):
    dict_data[i] = dict_data[i].astype(float)
    dict_data[i] /= 255.
'''

for i in range(len(training_data)):
    training_data[i] = training_data[i].astype(float)
    training_data[i] /= 255.

#Initialize network dictionary and parameters
network = lca.r_network(dict_data)
network.set_parameters(lamb, tau, delta, u_stop, t_type)
network.set_dim(im_dims)


################### Run each training image through network #######################################
################### For each image, generate sparse code then update the trained dictionary #######

#Print out the time and start the training process
#Save out the original dictionary
ts = time.time()
st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
print("Start time: ", st)
before = network.save_dictionary(5, 10, dict1_path)

#Initialize x values and residual array for residual plot
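#A hedged sketch of the initialization the comment above refers to; the
#array names are assumptions, and numpy is assumed imported as np as in
#the other examples.
x_vals = np.arange(len(training_data))
residuals = np.zeros(len(training_data))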