def get_ann_trained_on_patterns(training_patterns, training_iterations):
    """Create a fresh FFBP network and train it on the given patterns.

    Args:
        training_patterns: list of [input, target] pattern pairs, as
            accepted by NeocorticalNetwork.train.
        training_iterations: number of full passes over training_patterns.

    Returns:
        The trained NeocorticalNetwork instance.
    """
    # Same topology/hyper-parameters as the other training helpers in this
    # module: 49 in, 30 hidden, 49 out; the last two args are presumably
    # learning rate and momentum — confirm against NeocorticalNetwork.
    ann = NeocorticalNetwork(49, 30, 49, 0.01, 0.9)
    # Fix: original used Python-2-only xrange; every other loop in this file
    # uses range, which behaves identically here.
    for _ in range(training_iterations):
        ann.train(training_patterns)
    return ann
def global_sequential_FFBP_training(ss, training_iterations):
    """Train one FFBP network on the first 5*ss associative patterns.

    Args:
        ss: subset size; the first 5*ss patterns are used as one batch.
        training_iterations: number of full passes over the batch.

    Returns:
        The trained NeocorticalNetwork instance.
    """
    io_dim = 49
    network = NeocorticalNetwork(io_dim, 30, io_dim, 0.01, 0.9)
    batch = training_patterns_associative[:5 * ss]
    for _ in range(training_iterations):
        network.train(batch)
    return network
def iterate_over_experiments_suite_span_output_demo_local(start_index, stop_index):
    """Consolidation demo: train on chaotically recalled outputs as both
    input AND target ("output as IO"), for experiments start_index..stop_index-1.

    For each experiment the network is reset and trained twice — once for 15
    iterations per pattern subset and once for 200 — and the goodness-of-fit
    after each regime is appended to a log line.

    Returns the (last-trained) NeocorticalNetwork instance.
    """
    ann = NeocorticalNetwork(49, 30, 49, 0.01, 0.9)
    for exp_index in range(start_index, stop_index):
        # exp_index%4 + 2 cycles the pattern-set size through 2-5.
        current_chaotic_patterns, current_pseudopatterns = \
            Tools.retrieve_patterns_for_consolidation(exp_index, exp_index%4 + 2)  # 2-5 looped
        # NOTE(review): training_set is assigned but never used in this function.
        training_set = []
        t0 = time.time()
        ann.reset()
        # First regime: 15 training iterations per chaotic-pattern subset.
        for cp_subset in current_chaotic_patterns:
            training_subset = []
            for cp in cp_subset:
                # cp[1] (the chaotic output) is used as BOTH input and target.
                training_subset.append([cp[1], cp[1]])
            for i in range(15):
                ann.train(training_subset)
        # `i` is deliberately read AFTER the loop: i+1 == iterations performed.
        results_line = 'Neocortical module consolidation. Output as IO. Exp#'+str(exp_index)+\
            '\n'+str(i+1)+' iters: g='+str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index%4+2)))
        ann.reset()
        # Second regime: same training, but 200 iterations per subset.
        for cp_subset in current_chaotic_patterns:
            training_subset = []
            for cp in cp_subset:
                training_subset.append([cp[1], cp[1]])
            for i in range(200):
                ann.train(training_subset)
        results_line += '\n'+str(i+1)+' iters: g=' + str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index % 4 + 2)))
        t1 = time.time()
        print 'Trained and evaluated performance in '+'{:8.3f}'.format(t1-t0), 'seconds'
        print results_line
        # NOTE(review): logging placed inside the per-experiment loop to match
        # iterate_over_experiments_suite — confirm against the original layout.
        Tools.append_line_to_log(results_line)
    return ann
def traditional_training_with_catastrophic_interference(ss, training_iterations):
    """Sequentially train one network on five disjoint pattern subsets of
    size ss, demonstrating catastrophic interference between subsets.

    Args:
        ss: size of each of the five consecutive training subsets.
        training_iterations: training passes per subset.

    Returns:
        The trained NeocorticalNetwork instance.
    """
    io_dim = 49
    network = NeocorticalNetwork(io_dim, 30, io_dim, 0.01, 0.9)
    patterns = training_patterns_associative[:25]
    for subset_num in range(5):
        start = subset_num * ss
        subset = patterns[start:start + ss]
        for _ in range(training_iterations):
            network.train(subset)
    return network
def train_on_chaotic_patterns():
    """Train a fresh FFBP network on the chaotically recalled patterns of
    experiment #1.

    Each random input is paired with the chaotic output it elicited, and the
    network is trained once on each of the first five pattern subsets, in
    order.

    Returns:
        The trained NeocorticalNetwork instance.
    """
    chaotic_outs, rand_ins = retrieve_chaotic_patterns_from_exp_num(1)
    # Pair each random input with its corresponding chaotic output.
    # (Comprehension replaces the original manual append-by-index loops.)
    chaotic_patts = [
        [[rand_ins[i][j], chaotic_outs[i][j]] for j in range(len(chaotic_outs[i]))]
        for i in range(len(chaotic_outs))
    ]
    io_dim = 49
    ann = NeocorticalNetwork(io_dim, 30, io_dim, 0.01, 0.9)
    # One training pass per subset, sequentially over subsets 0-4.
    # Fix: removed a dead no-op loop (`for train_iters in range(10): pass`)
    # and an unused `training_set` accumulator from the original.
    for i in range(5):
        ann.train(chaotic_patts[i])
    return ann
def iterate_over_experiments_suite(start_index, stop_index, scheme_num):
    """Consolidation run over experiments start_index..stop_index-1 using the
    training-set construction identified by scheme_num.

    For each experiment the network is reset and trained twice — 15 iterations
    and then 200 — with the goodness-of-fit after each regime appended to a
    per-experiment log line.
    """
    ann = NeocorticalNetwork(49, 30, 49, 0.01, 0.9)
    for exp_index in range(start_index, stop_index):
        # exp_index%4 + 2 cycles the pattern-set size through 2-5.
        current_chaotic_patterns, current_pseudopatterns = \
            Tools.retrieve_patterns_for_consolidation(exp_index, exp_index%4 + 2)  # 2-5 looped
        # Combine chaotic patterns and pseudopatterns per the chosen scheme.
        training_set = get_training_set_from_patterns_in_scheme_full_set(current_chaotic_patterns, current_pseudopatterns, scheme_num)
        t0 = time.time()
        ann.reset()
        # First regime: 15 training iterations.
        for i in range(15):
            ann.train(training_set)
        # `i` is deliberately read AFTER the loop: i+1 == iterations performed.
        results_line = 'Neocortical module consolidation. Scheme: '+str(scheme_num)+'. Exp#'+str(exp_index)+ \
            '\n'+str(i+1)+' iters: g='+str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index%4+2)))
        ann.reset()
        # Second regime: 200 training iterations from a fresh reset.
        for i in range(200):
            ann.train(training_set)
        results_line += '\n'+str(i+1)+' iters: g=' + str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index % 4 + 2)))
        t1 = time.time()
        print 'Trained and evaluated performance in '+'{:8.3f}'.format(t1-t0), 'seconds'
        print results_line
        # One log line appended per experiment.
        Tools.append_line_to_log(results_line)