Example No. 1
alpha = 0.85
alpha_decay = 1
alpha_decay_rate = 0.999
alpha_decay_iters = 1
num_rfields = 50
num_images = 5000      #60000 - num_rfields
image_file = 'train-images.idx3-ubyte'    #'t10k-images.idx3-ubyte'

#Plotting parameters
win1 = 100  #Window for mov avg 1
win2 = 500 #Window for mov avg 2
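# A minimal sketch of the trailing moving average these windows suggest (illustrative
# only; the plotting code is not part of this excerpt, and np is the file's numpy import):
def moving_average(trace, window):
    """Trailing moving average over a 1-D error/activity trace."""
    kernel = np.ones(window) / window
    return np.convolve(trace, kernel, mode='valid')
# e.g. smooth1 = moving_average(errors, win1); smooth2 = moving_average(errors, win2)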


######################## Preprocess data - dictionary gets loaded with first num_rfields MNIST images ############################
#################################### Training data starts loading after dictionary images ########################################
dict_data = mnist.load_images(image_file, num_rfields)
training_data = mnist.load_images(image_file, num_images, num_rfields)
dict_data = [np.array(i, dtype=float) / 255 for i in dict_data]
training_data = [np.array(i, dtype=float) / 255 for i in training_data]




######################################### Initialize network dictionary and parameters ###########################################
D = sp.build_dictionary(dict_data)
network = lca.r_network(D)
network.set_parameters(lamb, tau, delta, u_stop, t_type)
network.set_dim(dict_data[0].shape)
#Save out the original dictionary
network.save_dictionary(5, 10, dict1_path, line_color = 255)
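# The alpha_decay* parameters are not used in the lines shown; presumably the training
# loop decays the learning rate every alpha_decay_iters presentations.  A minimal sketch
# of that schedule (assumed, not the repo's exact update loop):
for it, stim in enumerate(training_data, start=1):
    # ... LCA inference on stim and a dictionary update scaled by alpha would go here ...
    if alpha_decay and it % alpha_decay_iters == 0:
        alpha *= alpha_decay_rate   # geometric decay of the learning rate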
Example No. 2

################### Set parameters ##############################################################
lamb = 3.
tau = 10.0
delta = 0.01
u_stop = 0.001
t_type = 'S'
alpha = 0.01
num_iterations = 10000

##Load initial dictionary, either random or image
##Save out initial dictionary as image
image_file = 't10k-images.idx3-ubyte'  #'train-images.idx3-ubyte'
#dict_data = np.random.rand(28,28)  #Use random init
dict_data = mnist.load_images(image_file, 1, 1)[0]
og_dict = Image.fromarray(dict_data)
og_dict.save(dict1_path)
training_data = mnist.load_images(image_file, 1, 15)[0]
og_im_data = training_data.copy()
training_data = training_data.astype(float)
training_data /= 255.

#Initialize network dictionary, then save stimulus and orig dict
dict_data = dict_data.astype(float)
dict_data /= 255.
network = lca.r_network(dict_data.flatten())
network.set_parameters(lamb, tau, delta, u_stop, t_type)
network.set_stimulus(training_data.flatten(), True)
og_im = Image.fromarray(og_im_data)
og_im.save(file_path + '/stim.png')
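# set_parameters suggests standard Rozell LCA dynamics: lamb is the threshold, tau the
# time constant, delta the integration step, u_stop the convergence tolerance, and
# t_type = 'S' selects soft thresholding.  The repo's r_network internals are not shown
# here; a minimal, self-contained sketch under that reading:
import numpy as np

def soft_threshold(u, lamb):
    """T_lambda(u) = sign(u) * max(|u| - lambda, 0)."""
    return np.sign(u) * np.maximum(np.abs(u) - lamb, 0.)

def lca_sparse_code(D, s, lamb, tau, delta, u_stop, max_iters=1000):
    """Drive b = D^T s, lateral inhibition G = D^T D - I, Euler-integrated states u."""
    b = D.T @ s
    G = D.T @ D - np.eye(D.shape[1])
    u = np.zeros(D.shape[1])
    for _ in range(max_iters):
        a = soft_threshold(u, lamb)
        du = (delta / tau) * (b - u - G @ a)
        u += du
        if np.max(np.abs(du)) < u_stop:   # stop once the update is tiny
            break
    return soft_threshold(u, lamb)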
Example No. 3
alpha = 1.
num_stims = 1
num_nodes = 5
num_dict_rows = 5
num_dict_cols = int(num_nodes / num_dict_rows)
num_stim_rows = 1
num_stim_cols = int(num_stims / num_stim_rows)
num_iterations = 1000
image_file = 'train-images.idx3-ubyte'  #'t10k-images.idx3-ubyte'
#plot_detail = '/[' + str(num_stims) + ' stim, alpha = ' + str(alpha) + ']'
#plot_path = file_path + plot_detail + '.png'

##Load initial dictionary, either random or image
##Save out initial dictionary as image
#dict_data = np.random.rand(28,28)  #Use random init
dict_data = mnist.load_images(image_file, num_nodes, num_stims)
training_data = mnist.load_images(image_file, num_stims)
for i in range(len(dict_data)):
    dict_data[i] = dict_data[i].astype(float)
    dict_data[i] /= 255.
for i in range(len(training_data)):
    training_data[i] = training_data[i].astype(float)
    training_data[i] /= 255.

D = sp.build_dictionary(dict_data)
#D = np.random.rand(784, num_nodes)   #Multiple node dictionary randomly initialized
#T = sp.build_dictionary(training_data)
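# The commented-out alternative above (np.random.rand(784, num_nodes)) implies that
# sp.build_dictionary returns a 784 x num_nodes matrix, one flattened 28x28 image per
# column.  A minimal sketch of that construction (the repo's sp code is not shown, and
# whether it also normalizes the columns is left open here):
import numpy as np

def build_dictionary(images):
    """Stack each 28x28 image as a flattened column: shape (784, len(images))."""
    return np.column_stack([img.flatten() for img in images])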

#####Save stimulus out if needed
##For single stimulus case
'''
Example No. 4
files = os.listdir()
file_list = []
for i in files:
    if "idx" in i:
        file_list.append(i)
'''   

##############################Check print_meta function#################################################################
'''
for name in (file_list):
    mnist.print_meta(name)
'''


##############################Check load_images, load_labels, and save_images#########################################################

num_images = 20
start_pos = 0
image_file = 'train-images.idx3-ubyte'
label_file = 'train-labels.idx1-ubyte'
output_path = file_path + '\\Git_Repos\\jc2\\MNIST_Load\\Images\\test_image_'
mnist.print_meta(image_file)

image_data = mnist.load_images(image_file, num_images, start_pos)
label_data = mnist.load_labels(label_file, num_images, start_pos)
mnist.save_images(image_data, output_path)
print(label_data)
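# print_meta presumably reports the IDX header.  The IDX format itself is fixed: a
# big-endian magic number (2051 for image files, 2049 for label files), the item count,
# and for images the row and column sizes.  A minimal reader, independent of the repo's
# mnist module:
import struct

def read_idx_meta(path):
    """Return the big-endian IDX header fields: magic, count, and dims if present."""
    with open(path, 'rb') as f:
        magic, count = struct.unpack('>II', f.read(8))
        if magic == 2051:                        # image file: rows and cols follow
            rows, cols = struct.unpack('>II', f.read(8))
            return magic, count, rows, cols
        return magic, count                      # label file

# read_idx_meta('train-images.idx3-ubyte')  # -> (2051, 60000, 28, 28)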



Example No. 5
tlabel_file = "t10k-labels.idx1-ubyte"  # Test labels
num_images = 5000  # Number of training images
num_timages = 5000  # Number of test images


#################################### Set Rozell params ################################################
lamb = 0.0
tau = 10.0
delta = 0.01
u_stop = 0.001
t_type = "S"
alpha = 0.85

############################ Load all MNIST images and labels #########################################

image_data = mnist.load_images(image_file, num_images, 5000)
label_data = mnist.load_labels(label_file, num_images, 5000)
timage_data = mnist.load_images(timage_file, num_timages)
tlabel_data = mnist.load_labels(tlabel_file, num_timages)
if len(image_data) != len(label_data):
    print("TRAINING DATA ERROR: Num of images doesn't match num of labels!!!!!")
if len(timage_data) != len(tlabel_data):
    print("TEST DATA ERROR: Num of images doesn't match num of labels!!!!!")

############################### Build training data ###################################

images = []
onehot_labels = []
numeric_labels = []
for i in range(len(image_data)):
    image_data[i] = image_data[i].astype(float)
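# The loop is cut off above.  Given the three lists it fills, it presumably normalizes
# each image and one-hot encodes each digit label; a hedged sketch of that conversion
# (not the repo's exact code):
import numpy as np

for img, lbl in zip(image_data, label_data):
    images.append(img.astype(float).flatten() / 255.)
    onehot = np.zeros(10)
    onehot[int(lbl)] = 1.0            # one-hot vector for the digit class
    onehot_labels.append(onehot)
    numeric_labels.append(int(lbl))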
Example No. 6
##Big laptop
os.chdir("C:\\Users\\Jack2\\Google Drive\\URMP\\jc2\\MNIST_Load")
##Little laptop
#os.chdir("C:\\Users\\Jack\\Google Drive\\URMP\\jc2\\MNIST_Load")

files = os.listdir()
file_list = []
for i in files:
    if "idx" in i:
        file_list.append(i)

########################################################################################################################

file_path = file_list[0]
dictionary_data = mnist.load_images(file_path, 50)
y = mnist.load_images(file_list[0], 1, 100)[0].flatten()
D = sp.build_dictionary(dictionary_data)
#print ("D.shape: ", D.shape)
approx = None

##Generate sparse code
current_beta = None
next_beta = None
indices = None
condition = True

while condition:
    current_beta, indices = sp.choose_atoms(D, y, indices)
    next_beta, indices = sp.choose_atoms(D, y, indices)
    print(current_beta.shape, next_beta.shape)
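# sp.choose_atoms is not shown; the loop above looks like a greedy pursuit that adds
# dictionary atoms incrementally.  For context, one matching-pursuit-style selection
# step (an illustration of the general technique, not the repo's function):
import numpy as np

def omp_step(D, y, selected):
    """Add the atom most correlated with the current least-squares residual."""
    if selected:
        beta, *_ = np.linalg.lstsq(D[:, selected], y, rcond=None)
        residual = y - D[:, selected] @ beta
    else:
        residual = y
    scores = np.abs(D.T @ residual)
    scores[selected] = -np.inf        # never re-pick an atom already chosen
    return selected + [int(np.argmax(scores))]

# indices = []
# for _ in range(10):                 # pick ten atoms greedily
#     indices = omp_step(D, y, indices)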
Example No. 7

##############################Test return_sparse##########################################################################################

#Set parameters
lamb = 8
tau = 10
delta = 0.001
u_stop = 0.001
t_type = 'S'
num_images = 1


#Load MNIST dictionary and signal
image_file = 't10k-images.idx3-ubyte'  #'train-images.idx3-ubyte'
signal_data = mnist.load_images(image_file, num_images, 9000)
dict_data = mnist.load_images(image_file, 50, 1)
#dict_data = pandas.read_csv('trained_data.csv', header=None, names=None).values

signal_data[0] = signal_data[0].astype(float)
signal_data[0] /= 255.
for i in range(len(dict_data)):
    dict_data[i] = dict_data[i].astype(float)
    dict_data[i] /= 255.
D = sp.build_dictionary(dict_data)
#D = dict_data

#Run Rozell and generate sparse code
network = lca.r_network(D)
network.set_parameters(lamb, tau, delta, u_stop, t_type)
lambdas = [lamb]
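# The sweep over lambdas is cut off here.  A hedged sketch of how it might continue,
# reusing only calls seen in these examples (set_parameters, set_stimulus); return_sparse
# is named in the banner above, but its exact signature is an assumption:
for lam in lambdas:
    network.set_parameters(lam, tau, delta, u_stop, t_type)
    network.set_stimulus(signal_data[0].flatten(), True)
    a = network.return_sparse()                           # assumed: returns the sparse code
    recon_err = np.linalg.norm(D @ a - signal_data[0].flatten())
    print(lam, np.count_nonzero(a), recon_err)            # sparsity vs. reconstruction error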