def iterate_over_experiments_suite_span_output_demo_local(start_index, stop_index):
    ann = NeocorticalNetwork(49, 30, 49, 0.01, 0.9)

    for exp_index in range(start_index, stop_index):
        current_chaotic_patterns, current_pseudopatterns = \
            Tools.retrieve_patterns_for_consolidation(exp_index, exp_index%4 + 2)  # 2-5 looped
        training_set = []

        t0 = time.time()
        ann.reset()
        for cp_subset in current_chaotic_patterns:
            training_subset = []
            for cp in cp_subset:
                training_subset.append([cp[1], cp[1]])
            for i in range(15):
                ann.train(training_subset)

        results_line = 'Neocortical module consolidation. Output as IO. Exp#'+str(exp_index)+\
                       '\n'+str(i+1)+' iters: g='+str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index%4+2)))

        ann.reset()
        for cp_subset in current_chaotic_patterns:
            training_subset = []
            for cp in cp_subset:
                training_subset.append([cp[1], cp[1]])
            for i in range(200):
                ann.train(training_subset)

        results_line += '\n'+str(i+1)+' iters: g=' + str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index % 4 + 2)))
        t1 = time.time()
        print 'Trained and evaluated performance in '+'{:8.3f}'.format(t1-t0), 'seconds'
        print results_line
        Tools.append_line_to_log(results_line)

    return ann
def run_trials_for_patterns_per_output_on_subsets_sequential(patterns_per_output, distortion_P):
    # 20 trials per set size, 10 patterns per chaotically recalled output:
    for round_ctr in range(20):
        for set_size_ctr in range(2, 6):
            init_str = 'Performing perfect neocortical memory consolidation according to proposed distortion scheme' \
                       'for SUBSETS, i.e. with catastrophic interference. '+ \
                       '. Suite round#'+str(round_ctr)+'. Set size ='+str(set_size_ctr)+'.'
            print init_str
            Tools.append_line_to_log(init_str)

            ann.reset()
            training_set_10 = generate_training_set(
                set_size_ctr, training_patterns_associative, patterns_per_output=patterns_per_output,
                distortion_P=distortion_P)
            for subset_ctr in range(5):
                training_subset_10 = training_set_10[subset_ctr * set_size_ctr * patterns_per_output:
                    (subset_ctr + 1) * set_size_ctr * patterns_per_output]
                for i in range(15):  # training iterations
                    ann.train(training_subset_10)
            g_10 = NeocorticalMemoryConsolidation. \
                evaluate_goodness_of_fit(ann, training_patterns_associative[:2 * set_size_ctr])
            res_10_str = str(i+1) + ' training iterations, ' + str(patterns_per_output) + \
                         ' patterns per output, P='+str(distortion_P)+', goodness of fit, g=' + str(g_10)
            print res_10_str
            Tools.append_line_to_log(res_10_str)
def run_trials_for_patterns_per_output(patterns_per_output):
    # 20 trials per set size, 10 patterns per chaotically recalled output:
    for round_ctr in range(20):
        for set_size_ctr in range(2, 6):
            init_str = 'Performing perfect neocortical memory consolidation according to proposed distortion scheme. ' \
                       +'. Suite round#'+str(round_ctr)+'. Set size ='+str(set_size_ctr)+'.'
            print init_str
            Tools.append_line_to_log(init_str)

            training_set_10 = generate_training_set(set_size_ctr, training_patterns_associative,
                                                    patterns_per_output=patterns_per_output, distortion_P=0.1)
            ann.reset()
            for i in range(100):  # training iterations
                ann.train(training_set_10)
            g_10 = NeocorticalMemoryConsolidation. \
                evaluate_goodness_of_fit(ann, training_patterns_associative[:2*set_size_ctr])
            res_10_str = str(patterns_per_output)+' patterns per output, P=0.1, goodness of fit, g='+str(g_10)
            print res_10_str
            Tools.append_line_to_log(res_10_str)
def iterate_over_experiments_suite_halved_pseudopattern_size(start_index, stop_index, scheme_num):

    for exp_index in range(start_index, stop_index):
        current_chaotic_patterns, current_pseudopatterns = \
            Tools.retrieve_patterns_for_consolidation(exp_index, exp_index%4 + 2)  # 2-5 looped
        training_set = get_training_set_from_patterns_in_scheme_half_pseudopatterns(current_chaotic_patterns,
                                                                                    current_pseudopatterns, scheme_num)

        t0 = time.time()
        ann = get_ann_trained_on_patterns(training_patterns=training_set, training_iterations=15)
        results_line = 'Neocortical module consolidation. Halved pseudopattern set size. Scheme: '+str(scheme_num)+\
                       '. Exp#'+str(exp_index)+'\n15 iters: g='+\
                       str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index%4+2)))

        for i in range(200):
            ann.train(training_set)
        results_line += '\n1k iters: g=' + str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index % 4 + 2)))
        t1 = time.time()
        print 'Trained and evaluated performance in '+'{:8.3f}'.format(t1-t0), 'seconds'
        print results_line
        Tools.append_line_to_log(results_line)
def iterate_over_experiments_suite(start_index, stop_index, scheme_num):
    ann = NeocorticalNetwork(49, 30, 49, 0.01, 0.9)

    for exp_index in range(start_index, stop_index):
        current_chaotic_patterns, current_pseudopatterns = \
            Tools.retrieve_patterns_for_consolidation(exp_index, exp_index%4 + 2)  # 2-5 looped
        training_set = get_training_set_from_patterns_in_scheme_full_set(current_chaotic_patterns,
                                                                         current_pseudopatterns, scheme_num)
        t0 = time.time()
        ann.reset()
        for i in range(15):
            ann.train(training_set)
        results_line = 'Neocortical module consolidation. Scheme: '+str(scheme_num)+'. Exp#'+str(exp_index)+ \
                       '\n'+str(i+1)+' iters: g='+str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index%4+2)))

        ann.reset()
        for i in range(200):
            ann.train(training_set)
        results_line += '\n'+str(i+1)+' iters: g=' + str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index % 4 + 2)))
        t1 = time.time()
        print 'Trained and evaluated performance in '+'{:8.3f}'.format(t1-t0), 'seconds'
        print results_line
        Tools.append_line_to_log(results_line)
# Esempio n. 6 ("Example no. 6" — paste-site artifact, not code; the fragment
# below is missing its enclosing function definition)
    for i in range(40):
        ann_global = NeocorticalModuleTraining.global_sequential_FFBP_training(ss=set_size, training_iterations=200)
        ann_local = NeocorticalModuleTraining.traditional_training_with_catastrophic_interference(
            ss=set_size, training_iterations=200)

        # global_io_results = Tools.generate_recall_attempt_results_for_ann(ann_global, original_training_set)
        # local_io_results = Tools.generate_recall_attempt_results_for_ann(ann_local, original_training_set)
        #
        # Tools.save_aggregate_image_from_ios(global_io_results, 'global_aggregate_im', 0)
        # Tools.save_aggregate_image_from_ios(local_io_results, 'local_aggregate_im', 1)

        global_goodness = NeocorticalMemoryConsolidation.evaluate_goodness_of_fit(ann_global, original_training_set)
        local_goodness = NeocorticalMemoryConsolidation.evaluate_goodness_of_fit(ann_local, original_training_set)
        global_gs.append(global_goodness)
        local_gs.append(local_goodness)

        log_line = 'EVALUATED baseline. g\'s - ' + 'global: ' + str(global_goodness) + ', local: ' + str(local_goodness)
        print log_line
        Tools.append_line_to_log(log_line)

    avg_global_g = Tools.get_avg(global_gs)
    avg_local_g = Tools.get_avg(local_gs)

    avgs_global.append(avg_global_g)
    avgs_local.append(avg_local_g)

    final_result_line = 'Final results for current set size: global avg. = ' + str(avg_global_g) + ', local avg. = ' + \
                        str(avg_local_g)
    print final_result_line
    Tools.append_line_to_log(final_result_line)