# Produce parameter replacement dict
replace = {
    "tau_syn_E": 0.2,
    "tau_syn_I": 0.2,
    "v_thresh": 1.,
}
output_v = []
populations, projections, custom_params = restore_simulator_from_file(
    sim, args.model,
    is_input_vrpss=True,
    vrpss_cellparams=input_params,
    replace_params=replace)
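# Cap atoms per core; the variable-rate Poisson source keeps per-neuron rate
# schedules on chip, so it is given a smaller allocation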
sim.set_number_of_neurons_per_core(SpikeSourcePoissonVariable, 16)
sim.set_number_of_neurons_per_core(sim.SpikeSourcePoisson, 16)
sim.set_number_of_neurons_per_core(sim.IF_curr_exp, 64)
set_i_offsets(populations, runtime)

# if args.test_with_pss:
#     pss_params = {
#         'rate'
#     }
#     populations.append(sim.Population(sim.SpikeSourcePoisson, ))
# set up recordings for other layers if necessary
for pop in populations[:]:
    pop.record("spikes")
if args.record_v:
    populations[-1].record("v")
spikes_dict = {}
neo_spikes_dict = {}
sim.run(runtime)
for pop in populations[:]:
    spikes_dict[pop.label] = pop.spinnaker_get_data('spikes')

def run(args, start_index):
    # Record SCRIPT start time (wall clock)
    start_time = plt.datetime.datetime.now()

    # Note that this won't be global between processes
    global try_number
    try_number += 1
    globals_variables.unset_simulator()
    signal.signal(signal.SIGINT, signal_handler)
    current = multiprocessing.current_process()
    print('Started {}'.format(current) + '\n')

    f_name = "errorlog/" + current.name + "_stdout.txt"
    g_name = "errorlog/" + current.name + "_stderror.txt"
    f = open(f_name, 'w')
    g = open(g_name, 'w')
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    # sys.stdout = f
    # sys.stderr = g

    N_layer = 28**2  # number of neurons in input population
    t_stim = args.t_stim
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    # Reshape the input to flatten each image into a vector

    x_test = x_test.reshape(x_test.shape[0], np.prod(x_test.shape[1:]))

    testing_examples = args.chunk_size
    simtime = testing_examples * t_stim
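    # Build the VRPSS schedule: rows index the N_layer input neurons, columns
    # index the test examples; example i is presented during the window
    # [i * t_stim, (i + 1) * t_stim)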
    range_of_slots = np.arange(testing_examples)
    starts = np.ones((N_layer, testing_examples)) * (range_of_slots * t_stim)
    durations = np.ones((N_layer, testing_examples)) * t_stim
    rates = x_test[start_index:start_index + args.chunk_size, :].T
    y_test = y_test[start_index:start_index + args.chunk_size]

    # Scale rates: normalise pixel intensities to [0, 1], then multiply up to
    # the target peak rate (Hz)
    _0_to_1_rates = rates / float(np.max(rates))
    rates = _0_to_1_rates * args.rate_scaling

    input_params = {"rates": rates, "durations": durations, "starts": starts}

    print("Number of testing examples to use:", testing_examples)
    print("Min rate", np.min(rates))
    print("Max rate", np.max(rates))
    print("Mean rate", np.mean(rates))

    replace = None

    timestep = args.timestep
    timescale = args.time_scale_factor

    output_v = []
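    # PyNN setup(timestep, min_delay, max_delay, ...); here min and max delay
    # are pinned to the simulation timestep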
    sim.setup(timestep, timestep, timestep, time_scale_factor=timescale)

    print("Setting number of neurons per core...")

    sim.set_number_of_neurons_per_core(SpikeSourcePoissonVariable, 16)
    sim.set_number_of_neurons_per_core(sim.SpikeSourcePoisson, 16)
    sim.set_number_of_neurons_per_core(sim.IF_curr_exp, 64)
    sim.set_number_of_neurons_per_core(sim.IF_cond_exp, 64)

    print("Restoring populations and projections...")
    populations, projections, extra_params = restore_simulator_from_file(
        sim,
        args.model,
        input_type='vrpss',
        vrpss_cellparams=input_params,
        replace_params=replace)

    def add_correlation_population(sim, populations, projections):
        print("Adding an input/output correlation population...")
        # Define the neuron model for the population (long integration time),
        # probably the same as the other populations
        corr_neuron_model_params = populations[1].celltype.default_parameters
        input_size = populations[0].size
        # Make a population that correlates input and output
        corr_pop = sim.Population(input_size,
                                  cellclass=sim.IF_cond_exp(),
                                  label='corr_pop')
        # Add it to populations
        populations.append(corr_pop)

        # Weight for just one spike
        low_weight = 0.01
        weight = 0.1

        # Projection from the input (remember the delay); add to projections
        projections.append(
            sim.Projection(populations[0],
                           corr_pop,
                           sim.OneToOneConnector(),
                           sim.StaticSynapse(weight=low_weight,
                                             delay=len(populations) - 1),
                           receptor_type='excitatory'))
        # Projection from the output classes (class index 7 is hard-coded in
        # from_list); add to projections
        from_list = [(7, x, weight, 0) for x in range(input_size)]
        projections.append(
            sim.Projection(populations[-2],
                           corr_pop,
                           sim.FromListConnector(from_list),
                           receptor_type='excitatory'))
        return populations, projections

    populations, projections = add_correlation_population(
        sim, populations, projections)

    old_runtime = extra_params.get('simtime')
    print("Setting i_offsets...")
    set_i_offsets(populations, simtime, old_runtime=old_runtime)

    spikes_dict = {}
    neo_spikes_dict = {}
    current_error = None
    final_connectivity = {}

    def reset_membrane_voltage():
        # This doesn't work as-is: see sPyNNaker8 GitHub issue #331; it
        # requires the set_initial_value branches
        for population in populations[1:]:
            population.set_initial_value(variable="v", value=0)
        return
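    # (For reference, mainline PyNN exposes population.initialize(v=...); the
    # mid-run reset above is what needs the set_initial_value branches.)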

    for pop in populations[:]:
        pop.record("spikes")
    if args.record_v:
        populations[-1].record("v")

    sim_start_time = plt.datetime.datetime.now()
    if not args.reset_v:
        print('Presenting examples {}:{}'.format(
            start_index, start_index + testing_examples))
        sim.run(simtime)
    else:
        # Present one example at a time, resetting membrane voltage in between
        for i in range(args.chunk_size):
            print('Presenting example {}/{}'.format(start_index + i,
                                                    testing_examples))
            sim.run(t_stim)
            reset_membrane_voltage()

    # Compute time taken to reach this point
    end_time = plt.datetime.datetime.now()
    total_time = end_time - start_time
    sim_total_time = end_time - sim_start_time

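    # spinnaker_get_data('spikes') returns an (n_spikes, 2) array of
    # [neuron id, spike time (ms)]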
    for pop in populations[:]:
        spikes_dict[pop.label] = pop.spinnaker_get_data('spikes')
    if args.record_v:
        output_v = populations[-1].spinnaker_get_data('v')

    # Retrieving connectivity can take a long time, so it is gated by a flag
    if args.retrieve_connectivity:
        try:
            for proj in projections:
                try:
                    final_connectivity[proj.label] = \
                        np.array(proj.get(('weight', 'delay'), format="list")._get_data_items())
                except AttributeError as ae:
                    print(
                        "Careful! Something happened when retrieving the "
                        "connectivity:", ae,
                        "\nRetrying using standard PyNN syntax...")
                    final_connectivity[proj.label] = \
                        np.array(proj.get(('weight', 'delay'), format="list"))
                except TypeError as te:
                    print("Connectivity is None (", te, ") for connection",
                          proj.label)
                    print("Connectivity as empty array.")
                    final_connectivity[proj.label] = np.array([])
        except Exception:
            traceback.print_exc()
            print("Couldn't retrieve connectivity.")

    if args.result_filename:
        results_filename = args.result_filename
    else:
        results_filename = "mnist_results"
        if args.suffix:
            results_filename += args.suffix

    # Retrieve simulation parameters for provenance tracking and debugging purposes
    sim_params = {
        "argparser": vars(args),
        "git_hash": retrieve_git_commit(),
        "run_end_time": end_time.strftime("%H:%M:%S_%d/%m/%Y"),
        "wall_clock_script_run_time": str(total_time),
        "wall_clock_sim_run_time": str(sim_total_time),
    }
    results_file = os.path.join(args.result_dir,
                                results_filename + "_" + str(start_index))

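    # Dict-valued entries below are pickled into 0-d object arrays;
    # np.load needs allow_pickle=True to read them back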
    np.savez_compressed(results_file,
                        output_v=output_v,
                        neo_spikes_dict=neo_spikes_dict,
                        all_spikes=spikes_dict,
                        all_neurons=extra_params['all_neurons'],
                        testing_examples=testing_examples,
                        N_layer=N_layer,
                        no_testing_examples=testing_examples,
                        num_classes=10,
                        y_test=y_test,
                        input_params=input_params,
                        input_size=N_layer,
                        simtime=simtime,
                        t_stim=t_stim,
                        sim_params=sim_params,
                        final_connectivity=final_connectivity,
                        init_connectivity=extra_params['all_connections'],
                        extra_params=extra_params,
                        current_error=current_error)
    sim.end()

    # Analysis time!
    post_run_analysis(filename=results_file,
                      fig_folder=args.result_dir + args.figures_dir)

    # Report where the results are stored
    print("Results stored in  -- " + results_filename)

    # Report time taken
    print("Total time elapsed -- " + str(total_time))
    return current_error
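
# A minimal sketch of how run() might be driven per chunk (hypothetical
# `parser` and `total_examples`; the real CLI setup is outside this snippet):
#
#     if __name__ == "__main__":
#         args = parser.parse_args()
#         for start in range(0, total_examples, args.chunk_size):
#             run(args, start_index=start)
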
def run(args):

    # Checking directory structure exists
    # Make sure the results directory exists
    if not os.path.exists(args.result_dir):
        os.mkdir(args.result_dir)

    # Load data from file

    x_train = np.load("dataset/x_train.npz")['arr_0']
    y_train = np.load("dataset/y_train.npz")['arr_0']
    x_test = np.load("dataset/x_test.npz")['arr_0']
    y_test = np.load("dataset/y_test.npz")['arr_0']

    labels = np.load("dataset/labels.npz", allow_pickle=True)['arr_0']
    print(labels)
    # Produce parameter replacement dict
    replace = {
        'e_rev_E': 0.0,
        'tau_m': 20.0,
        'cm': 1.0,
        'v_thresh': -50.0,
        'v_rest': -65.0,
        'i_offset': 0.0,
        'tau_syn_I': 5.0,
        'tau_syn_E': 5.0,
        'tau_refrac': 0.1,
        'v_reset': -65.0,
        'e_rev_I': -70.0
    }

    t_stim = args.t_stim
    runtime = t_stim * args.testing_examples
    example = x_test[1]  # Just using a single test example (index 1)
    # Generate input params from data
    #input_params = convert_rate_array_to_VRPSS(example, runtime)

    # GammaRateFetcher is assumed to live in the same DataGenerator module;
    # it is needed when saving results at the end of this function
    from radioisotopedatatoolbox.DataGenerator import (
        IsotopeRateFetcher, BackgroundRateFetcher, LinearMovementIsotope,
        GammaRateFetcher)

    myisotope = IsotopeRateFetcher('Co-60', data_path=path, intensity=0.1)
    background = BackgroundRateFetcher(intensity=0.1, data_path=path)

    moving_isotope = LinearMovementIsotope(myisotope,
                                           background=background,
                                           path_limits=[-2, 2],
                                           duration=t_stim,
                                           min_distance=0.1)

    #input_params = moving_isotope.output
    #del input_params['distances']

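    # SpikeSourceArray expects one list of spike times per source neuron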
    input_params = {'spike_times': moving_isotope.spike_source_array}

    output_v = []
    populations, projections, custom_params = restore_simulator_from_file(
        sim,
        args.model,
        input_type='ssa',
        ssa_cellparams=input_params,
        replace_params=replace)

    cell_type = populations[1].celltype

    standard_tau_m = populations[1].get('tau_m')[0]
    multiplier = 10  # How much slower the AGC is than the normal neurons

    # Work out the weight that causes just one spike from v_rest
    one_spike_weight = 0.1  # for IF_cond_exp; 0.48 for IF_curr_exp

    AGC_input_weight = one_spike_weight / multiplier
    AGC_output_weight = one_spike_weight
    print("Adding AGC inhibitory neurons")
    AGC_pop1 = sim.Population(100, cell_type)
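    # The AGC population integrates slowly (tau_m is scaled up below) and
    # feeds back inhibition in proportion to overall input activity, acting
    # as an automatic gain control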

    # These bits probably aren't doing what they should and need more thought

    # Do an average pooling conv layer 3238 -> 100
    to_AGC_pop1 = sim.Projection(
        populations[0],
        AGC_pop1,
        sim.KernelConnector(shape_pre=(populations[0].size, 1),
                            shape_post=(AGC_pop1.size, 1),
                            shape_kernel=(30, 1),
                            weight_kernel=1 / one_spike_weight / 30 * np.ones(
                                (30, 1))),
        receptor_type='excitatory')
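    # NB: the weight_kernel expression evaluates as (1 / one_spike_weight) / 30
    # per element, i.e. roughly 0.33 with the defaults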

    from_AGC_pop1 = sim.Projection(
        AGC_pop1, populations[1], sim.AllToAllConnector(),
        sim.StaticSynapse(weight=AGC_output_weight / AGC_pop1.size),
        receptor_type='inhibitory')

    # Set the integration time of the AGC neurons
    AGC_integration_time = multiplier * standard_tau_m
    AGC_pop1.set(tau_m=AGC_integration_time)

    dt = sim.get_time_step()
    N_layer = len(populations)
    min_delay = sim.get_min_delay()
    max_delay = sim.get_max_delay()
    sim.set_number_of_neurons_per_core(SpikeSourcePoissonVariable, 16)
    sim.set_number_of_neurons_per_core(sim.SpikeSourcePoisson, 16)
    sim.set_number_of_neurons_per_core(sim.IF_curr_exp, 128)
    sim.set_number_of_neurons_per_core(sim.IF_cond_exp, 128)
    old_runtime = custom_params['runtime']
    set_i_offsets(populations, runtime, old_runtime=old_runtime)
    spikes_dict = {}
    neo_spikes_dict = {}

    def record_output(populations, offset, output):
        # Helper (not invoked below): accumulate output-layer spikes, shifting
        # spike times by `offset` so that successive chunks can be merged
        spikes = populations[-1].spinnaker_get_data('spikes')
        spikes = spikes + [0, offset]
        name = populations[-1].label
        if np.shape(spikes)[0] > 0:
            if name in output:
                output[name] = np.concatenate((output[name], spikes))
            else:
                output[name] = spikes
        return output

    for pop in populations[:]:
        pop.record("spikes")
    if args.record_v:
        populations[-1].record("v")
    sim.run(runtime)
    for pop in populations[:]:
        spikes_dict[pop.label] = pop.spinnaker_get_data('spikes')

    if args.record_v:
        output_v = populations[-1].spinnaker_get_data('v')
    sim.end()

    # Save results
    if args.result_filename:
        results_filename = args.result_filename
    else:
        results_filename = "isotope_results"
        if args.suffix:
            results_filename += args.suffix

        import pylab
        now = pylab.datetime.datetime.now()
        results_filename += now.strftime("_%H%M%S_%d%m%Y")

    np.savez_compressed(
        os.path.join(args.result_dir, results_filename),
        output_v=output_v,
        neo_spikes_dict=neo_spikes_dict,
        y_test=y_test,
        N_layer=N_layer,
        t_stim=t_stim,
        runtime=runtime,
        sim_time=runtime,
        dt=dt,
        custom_params={
            'distances': moving_isotope.distances,
            'isotope_labels':
                GammaRateFetcher.get_possible_sources(data_path=path)
        },
        **spikes_dict)