# NOTE(review): whitespace-mangled notebook (jupytext) export — an entire cell
# group collapsed onto one physical line, which is not valid Python as written.
# Visible pieces, in order:
#   1. The tail of a `load_data(...)` definition (its `def` header is not in
#      this chunk): a trailing `shuffle=True)` call argument and
#      `return trainloader, dim, number_rows_data`.
#   2. A `# -` cell marker, then `def large_cycle(map_, training_data)`: it
#      visualizes/prints the map, runs `cycle(map_, training_data)` for
#      `number_iterations` iterations, then visualizes/prints again.
#      Presumably an early free-function version of `MapClass.large_cycle`
#      used below — TODO confirm against the original notebook.
#   3. A top-level driver: loads data, builds
#      `MapClass(length, width, dim, move_closer_coef, number_iterations)`,
#      then calls `weights_to_map`, `step`, `cycle`, `classify_all`, and
#      `convert_data_tensor` (the last call's result is discarded — likely a
#      notebook cell whose output was displayed interactively).
# TODO(review): restore the original newlines/indentation from the source
# notebook instead of editing this line in place; the statement boundaries and
# the indentation of the truncated `load_data` tail cannot be reconstructed
# safely from this chunk alone.
shuffle=True) return trainloader, dim, number_rows_data # - def large_cycle(map_, training_data): basic_visualization(map_display(map_.map)) print(map_display(map_.map)) for i in range(number_iterations): cycle(map_, training_data) basic_visualization(map_display(map_.map)) print(map_display(map_.map)) training, dim, number_rows_data = load_data(data) map1 = MapClass(length, width, dim, move_closer_coef, number_iterations) map1.weights_to_map() map1.step(training, verbose=True) map1.cycle(training, verbose=True) map1.classify_all(map1.convert_data_tensor(data)) map1.convert_data_tensor(data)
# NOTE(review): second whitespace-mangled cell group collapsed onto one line
# (not valid Python as written). Visible pieces, in order:
#   1. The tail of a display/reshape helper (header not in this chunk —
#      presumably `map_display`): returns `map_.view(length, width)` for
#      1-dimensional data, else `map_.view(dim, length, width)`.
#   2. A `def large_cycle(map_, training_data)` identical in shape to the one
#      on the previous line — duplicated cell, likely an older revision of the
#      same notebook; TODO deduplicate when restoring the notebook structure.
#   3. A driver that differs from the previous line's: `load_data(data)` is
#      unpacked into a single `training` name, `MapClass` is built with only
#      `(length, width, dim, move_closer_coef)`, and several bare attribute
#      expressions (`map1.map`, `map1.distance_matrix`, `map1.impact_matrix`)
#      follow — these are no-ops in a script and were presumably notebook
#      cells whose values were displayed interactively.
# TODO(review): restore original newlines/indentation from the source
# notebook; the truncated helper's header and indentation cannot be
# reconstructed safely from this chunk alone.
if dim == 1: return map_.view(length, width) else: return map_.view(dim, length, width) def large_cycle(map_, training_data): basic_visualization(map_display(map_.map)) print(map_display(map_.map)) for i in range(number_iterations): cycle(map_, training_data) basic_visualization(map_display(map_.map)) print(map_display(map_.map)) training = load_data(data) map1 = MapClass(length, width, dim, move_closer_coef) map1.map map1.cycle(training) map1.map map1.distance_matrix map1.impact_matrix basic_visualization(map1.map)
# +
# Network configuration.
# NOTE(review): this cell was collapsed onto a single line in the export,
# which hid every statement behind the leading "# +" marker and turned the
# whole cell into a dead comment; reconstructed here as runnable code.
# `rgb_colors`, `color_names`, `matrix1`, `MapClass`, and `plt` are defined
# elsewhere in this file/notebook.
data = rgb_colors            # training samples (RGB color vectors, per the name)
data_lables = color_names    # NOTE(review): misspelling of "labels" kept — may be referenced elsewhere
batch_size = 2
length = 10                  # map grid height (nodes) — presumably; confirm against MapClass
width = 10                   # map grid width (nodes)
number_epochs = 100
shuffle = True
learning_rate = 0.01
# -

# Build the self-organizing map and train it.
map1 = MapClass(data, length, width, learning_rate, number_epochs,
                matrix1, data_lables, batch_size, shuffle)

# +
# map1.weights
# +
# training, dim, number_rows_data = load_data(data, batch_size)
# -

plt.rcParams['figure.dpi'] = 150   # higher-resolution inline figures
map1.large_cycle(draw_every_epoch=10, rgb=True)
map1.cycle()