def setupModel(params, settings, dt, simTime, populationsInput,
               populationsNoiseSource, populationsRN, populationsPN,
               populationsAN, projectionsPNAN):
    maxDelay = max([params['MAX_DELAY_RATECODE_TO_CLUSTER_RN'],
                    params['MAX_DELAY_CLASS_ACTIVITY_TO_CLUSTER_AN']])
    spynnaker.setup(timestep=dt, min_delay=1, max_delay=maxDelay)
    setupLayerInput(params, settings, populationsInput)
    setupLayerNoiseSource(params, simTime, populationsNoiseSource)
    setupLayerRN(params, neuronModel, cell_params, populationsInput[0],
                 populationsNoiseSource[0], populationsRN)
    setupLayerPN(params, neuronModel, cell_params, populationsRN, populationsPN)
    setupLayerAN(params, settings, neuronModel, cell_params, populationsInput[1],
                 populationsNoiseSource[0], populationsPN, populationsAN,
                 projectionsPNAN)
    printModelConfigurationSummary(params, populationsInput, populationsNoiseSource,
                                   populationsRN, populationsPN, populationsAN)
def mapping_process():
    ###################################
    SIM_TIME = TIME_SLOT * DATA_AMOUNT
    ###################################
    twitter_text_vectors = sentence_to_vec()
    orig = get_nearest_neighbor(twitter_text_vectors)
    show_result(orig, 1, './retrived_data/original_classification')
    response_space = generate_vr_response(twitter_text_vectors)
    spiking_space = generate_spiking_time(response_space)
    np.savetxt("./retrived_data/input_time.txt", spiking_space,
               fmt='%s', delimiter=',', newline='\n')
    spynnaker.setup(timestep=1)
    spynnaker.set_number_of_neurons_per_core(spynnaker.IF_curr_exp, 250)
    pn_population = setupLayer_PN(spiking_space)
    kc_population = setupLayer_KC()
    kc_population.record(["spikes"])
    pn_kc_projection = setupProjection_PN_KC(pn_population, kc_population)
    spynnaker.run(SIM_TIME)
    neo = kc_population.get_data(variables=["spikes"])
    spikeData_original = neo.segments[0].spiketrains
    spynnaker.end()
    return spikeData_original
def setupModel(settings, params, dt, simTime, populationsInput,
               populationsNoiseSource, populationsRN, populationsPN,
               populationsAN, projectionsPNAN):
    maxDelay = max([params['MAX_DELAY_RATECODE_TO_CLUSTER_RN'],
                    params['MAX_DELAY_CLASS_ACTIVITY_TO_CLUSTER_AN']])
    spynnaker.setup(timestep=dt, min_delay=1, max_delay=maxDelay)
    rnSpikeInjectionPort = settings['RN_SPIKE_INJECTION_PORT']
    rnSpikeInjectionPopLabel = settings['RN_SPIKE_INJECTION_POP_LABEL']
    classActivationSpikeInjectionPort = settings['CLASS_ACTIVATION_SPIKE_INJECTION_PORT']
    classActivationSpikeInjectionPopLabel = settings['CLASS_ACTIVATION_SPIKE_INJECTION_POP_LABEL']
    learning = settings['LEARNING']
    print 'Setting up input layer..'
    numInjectionPops = setupLayerInput(params, rnSpikeInjectionPort,
                                       rnSpikeInjectionPopLabel,
                                       classActivationSpikeInjectionPort,
                                       classActivationSpikeInjectionPopLabel,
                                       populationsInput, learning)
    print numInjectionPops, ' spikeInjection populations were needed to accommodate ', params['NUM_VR'], ' VRs'
    print 'Setting up noise source layer..'
    setupLayerNoiseSource(params, simTime, populationsNoiseSource)
    print 'Setting up RN layer..'
    numInputPops = len(populationsInput)
    injectionPops = populationsInput[1:numInputPops]
    numInjectionPops = len(injectionPops)
    setupLayerRN(params, neuronModel, cell_params, injectionPops,
                 populationsNoiseSource[0], populationsRN)
    # setupLayerRN(params, neuronModel, cell_params, injectionPops, populationsRN)
    print 'Setting up PN layer..'
    setupLayerPN(params, neuronModel, cell_params, populationsRN, populationsPN)
    print 'Setting up AN layer..'
    setupLayerAN(params, settings, neuronModel, cell_params, populationsInput[0],
                 populationsNoiseSource[0], populationsPN, populationsAN,
                 learning, projectionsPNAN)
    printModelConfigurationSummary(params, populationsInput, populationsNoiseSource,
                                   populationsRN, populationsPN, populationsAN)
def live_spike_receive_translated(self):
    self.stored_data = list()

    db_conn = DatabaseConnection(local_port=None)
    db_conn.add_database_callback(self.database_callback)

    p.setup(1.0)
    p.set_number_of_neurons_per_core(p.SpikeSourceArray, 5)
    pop = p.Population(
        25, p.SpikeSourceArray([[1000 + (i * 10)] for i in range(25)]))
    p.external_devices.activate_live_output_for(
        pop, translate_keys=True,
        database_notify_port_num=db_conn.local_port, tag=1,
        use_prefix=True, key_prefix=self.PREFIX,
        prefix_type=EIEIOPrefix.UPPER_HALF_WORD)
    p.run(1500)
    p.end()
    self.listener.close()
    self.conn.close()

    self.assertGreater(len(self.stored_data), 0)
    for key, time in self.stored_data:
        self.assertEqual(key >> 16, self.PREFIX)
        self.assertEqual(1000 + ((key & 0xFFFF) * 10), time)
def test(spikeTimes, trained_weights, label):
    # spikeTimes = extractSpikes(sample)
    runTime = int(max(max(spikeTimes))) + 100
    ##########################################
    sim.setup(timestep=1)
    pre_pop = sim.Population(input_size, sim.SpikeSourceArray,
                             {'spike_times': spikeTimes}, label="pre_pop")
    post_pop = sim.Population(output_size, sim.IF_curr_exp,
                              cell_params_lif, label="post_pop")

    # reshape the flat trained weight vector into an input_size x output_size matrix
    if len(trained_weights) > input_size:
        weights = [[0 for j in range(output_size)] for i in range(input_size)]
        k = 0
        for i in range(input_size):
            for j in range(output_size):
                weights[i][j] = trained_weights[k]
                k += 1
    else:
        weights = trained_weights

    connections = []
    for n_pre in range(input_size):      # len(untrained_weights) = input_size
        for n_post in range(output_size):  # len(untrained_weight[0]) = output_size
            # connections.append((n_pre, n_post, weights[n_pre][n_post]*(wMax), __delay__))
            connections.append((n_pre, n_post,
                                weights[n_pre][n_post] * wMax / max(trained_weights),
                                __delay__))
    prepost_proj = sim.Projection(pre_pop, post_pop,
                                  sim.FromListConnector(connections),
                                  synapse_type=sim.StaticSynapse(),
                                  receptor_type='excitatory')  # no more learning!
    # inhib_proj = sim.Projection(post_pop, post_pop, sim.AllToAllConnector(),
    #                             synapse_type=sim.StaticSynapse(weight=inhibWeight, delay=__delay__),
    #                             receptor_type='inhibitory')  # no more lateral inhibition

    post_pop.record(['v', 'spikes'])
    sim.run(runTime)
    neo = post_pop.get_data(['v', 'spikes'])
    spikes = neo.segments[0].spiketrains
    v = neo.segments[0].filter(name='v')[0]

    f1 = pplt.Figure(
        # plot voltage
        pplt.Panel(v, ylabel="Membrane potential (mV)", xticks=True,
                   yticks=True, xlim=(0, runTime + 100)),
        # raster plot
        pplt.Panel(spikes, xlabel="Time (ms)", xticks=True, yticks=True,
                   markersize=2, xlim=(0, runTime + 100)),
        title='Test with label ' + str(label),
        annotations='Test with label ' + str(label))
    f1.save('plot/' + str(trylabel) + str(label) + '_test.png')
    f1.fig.texts = []
    print("Weights:{}".format(prepost_proj.get('weight', 'list')))
    weight_list = [prepost_proj.get('weight', 'list'),
                   prepost_proj.get('weight', format='list', with_address=False)]
    # predict_label =
    sim.end()
    return spikes
def __init__(self, time, num_neurons):
    threading.Thread.__init__(self)
    p.setup(timestep=1.0, min_delay=1.0, max_delay=144.0)
    self.time = time
    self.num_neurons = num_neurons
    self.loop_connections = list()
    self.weight_to_spike = 2.0
    self.delay = 17
    self.cell_params = {
        'cm': 0.25,
        'i_offset': 0.0,
        'tau_m': 20.0,
        'tau_refrac': 2.0,
        'tau_syn_E': 5.0,
        'tau_syn_I': 5.0,
        'v_reset': -70.0,
        'v_rest': -65.0,
        'v_thresh': -50.0
    }
    self.injector_cell_params = {
        'host_port_number': 12345,
        'host_ip_address': "localhost",
        'virtual_key': 458752,
        'prefix': 7,
        'prefix_type': q.EIEIOPrefixType.UPPER_HALF_WORD
    }
    for i in range(0, self.num_neurons - 1):
        single_connection = (i, (i + 1) % self.num_neurons,
                             self.weight_to_spike, self.delay)
        self.loop_connections.append(single_connection)
    self.injection_connections = {
        'pop1': [(0, 0, self.weight_to_spike, 1)],
        'pop2': [(1, 0, self.weight_to_spike, 1)]
    }
    self.populations = {
        # name: Population(num neurons, neuron type, cell parameters, label)
        "injector": p.Population(2, q.ReverseIpTagMultiCastSource,
                                 self.injector_cell_params,
                                 label='spike_injector'),
        "pop1": p.Population(self.num_neurons, p.IF_curr_exp,
                             self.cell_params, label='pop1'),
        "pop2": p.Population(self.num_neurons, p.IF_curr_exp,
                             self.cell_params, label='pop2'),
        "pop3": p.Population(self.num_neurons, p.IF_curr_exp,
                             self.cell_params, label='pop3')
    }
    self.projections = [
        p.Projection(self.populations['pop1'], self.populations['pop1'],
                     p.FromListConnector(self.loop_connections)),
        p.Projection(self.populations['pop2'], self.populations['pop2'],
                     p.FromListConnector(self.loop_connections)),
        p.Projection(self.populations['pop2'], self.populations['pop3'],
                     p.FromListConnector(self.loop_connections)),
        p.Projection(self.populations['injector'], self.populations['pop1'],
                     p.FromListConnector(self.injection_connections['pop1'])),
        p.Projection(self.populations['injector'], self.populations['pop2'],
                     p.FromListConnector(self.injection_connections['pop2']))
    ]
def generate_data():
    spikesTrain = []
    organisedData = {}
    for i in range(input_class):
        for j in range(input_len):
            organisedData[(i, j)] = []
    for i in range(input_len):
        for j in range(output_size):
            neuid = (j, i)
            # five presentations of each pattern, one per repetition slot
            for rep in range(5):
                organisedData[neuid].append(
                    j * input_len * v_co * 5 + input_len * v_co * rep + i * v_co)
            organisedData[neuid].append(
                input_len * v_co * (3 * 5 + j) + i * v_co)
    for i in range(input_class):
        for j in range(input_len):
            neuid = (i, j)
            organisedData[neuid].sort()
            spikesTrain.append(organisedData[neuid])
    runTime = int(max(max(spikesTrain)))

    # generate Poisson noise spikes on hardware, then merge them into the trains
    sim.setup(timestep=1)
    noise = sim.Population(input_size, sim.SpikeSourcePoisson(), label='noise')
    noise.record(['spikes'])
    sim.run(runTime)
    neonoise = noise.get_data(["spikes"])
    spikesnoise = neonoise.segments[0].spiketrains
    sim.end()
    for i in range(input_size):
        for noisespike in spikesnoise[i]:
            spikesTrain[i].append(noisespike)
        spikesTrain[i].sort()
    return spikesTrain
def setupModel(settings, params, dt, simTime, populationsInput,
               populationsNoiseSource, populationsRN, populationsPN,
               populationsAN, projectionsPNAN):
    maxDelay = max([params['MAX_DELAY_RATECODE_TO_CLUSTER_RN'],
                    params['MAX_DELAY_CLASS_ACTIVITY_TO_CLUSTER_AN']])
    spynnaker.setup(timestep=dt, min_delay=1, max_delay=maxDelay)
    spikeSourceVrResponsePath = settings['SPIKE_SOURCE_VR_RESPONSE_PATH']
    spikeSourceActiveClassPath = settings['SPIKE_SOURCE_ACTIVE_CLASS_PATH']
    learning = settings['LEARNING']
    setupLayerInput(params, spikeSourceVrResponsePath,
                    spikeSourceActiveClassPath, populationsInput, learning)
    setupLayerNoiseSource(params, simTime, populationsNoiseSource)
    setupLayerRN(params, neuronModel, cell_params, populationsInput[0],
                 populationsNoiseSource[0], populationsRN)
    setupLayerPN(params, neuronModel, cell_params, populationsRN, populationsPN)
    setupLayerAN(params, settings, neuronModel, cell_params, populationsInput[1],
                 populationsNoiseSource[0], populationsPN, populationsAN,
                 learning, projectionsPNAN)
    printModelConfigurationSummary(params, populationsInput, populationsNoiseSource,
                                   populationsRN, populationsPN, populationsAN)
def mapping_process():
    assert SIM_TIME > 0
    spynnaker.setup(timestep=1)
    spynnaker.set_number_of_neurons_per_core(spynnaker.IF_curr_exp, 50)
    time_space = readData()
    pn_population = setupLayer_PN(time_space)
    kc_population = setupLayer_KC()
    kc_population.record(["spikes"])
    pn_kc_projection = setupProjection_PN_KC(pn_population, kc_population)
    spynnaker.run(SIM_TIME)
    neo = kc_population.get_data(variables=["spikes"])
    spikeData_original = neo.segments[0].spiketrains
    spynnaker.end()
    return spikeData_original
def main():
    # set up the simulation timestep and the minimum and maximum synaptic delays
    setup(timestep=simulationTimestep, min_delay=minSynapseDelay,
          max_delay=maxSynapseDelay, threads=4)

    # create the spike sources
    retinaLeft = createSpikeSource("Retina Left")
    retinaRight = createSpikeSource("Retina Right")

    # create the network and attach the spike sources
    network = createCooperativeNetwork(retinaLeft=retinaLeft,
                                       retinaRight=retinaRight)

    # run the simulation for the given time in milliseconds
    print "Simulation started..."
    run(simulationTime)
    print "Simulation ended."

    # plot results
    plotExperiment(retinaLeft, retinaRight, network)

    # finalise the program and simulation
    end()
def mapping_process():
    ###################################
    SIM_TIME = TIME_SLOT * DATA_AMOUNT
    ###################################
    spynnaker.setup(timestep=1)
    spynnaker.set_number_of_neurons_per_core(spynnaker.IF_curr_exp, 50)
    time_space = readData()  # manage to obtain the data correctly
    pn_population = setupLayer_PN(time_space)
    kc_population = setupLayer_KC()
    kc_population.record(["spikes"])
    pn_kc_projection = setupProjection_PN_KC(pn_population, kc_population)
    spynnaker.run(SIM_TIME)
    neo = kc_population.get_data(variables=["spikes"])
    spikeData_original = neo.segments[0].spiketrains
    spynnaker.end()
    return spikeData_original
def mapping_process():
    ###################################
    SIM_TIME = TIME_SLOT * DATA_AMOUNT
    ###################################
    response_space = generate_vr_response()
    spiking_space = generate_spiking_time(response_space)
    spynnaker.setup(timestep=1)
    spynnaker.set_number_of_neurons_per_core(spynnaker.IF_curr_exp, 250)
    pn_population = setupLayer_PN(spiking_space)
    kc_population = setupLayer_KC()
    kc_population.record(["spikes"])
    pn_kc_projection = setupProjection_PN_KC(pn_population, kc_population)
    spynnaker.run(SIM_TIME)
    neo = kc_population.get_data(variables=["spikes"])
    spikeData_original = neo.segments[0].spiketrains
    spynnaker.end()
    return spikeData_original
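# The setupLayer_PN / setupLayer_KC / setupProjection_PN_KC helpers used by
# the mapping_process() variants above are not shown in these snippets. A
# minimal hypothetical sketch of what they might look like (names, sizes and
# weights are assumptions, not taken from the original code):
#
# def setupLayer_PN(spiking_space):
#     # one spike-source neuron per projection neuron, driven by the
#     # precomputed spike times
#     return spynnaker.Population(len(spiking_space), spynnaker.SpikeSourceArray,
#                                 {'spike_times': spiking_space}, label='PN')
#
# def setupLayer_KC(n_kc=2500):
#     # Kenyon-cell layer of plain LIF neurons
#     return spynnaker.Population(n_kc, spynnaker.IF_curr_exp, {}, label='KC')
#
# def setupProjection_PN_KC(pn, kc):
#     # sparse random PN -> KC fan-out, as in mushroom-body models
#     return spynnaker.Projection(pn, kc,
#                                 spynnaker.FixedProbabilityConnector(0.1),
#                                 synapse_type=spynnaker.StaticSynapse(weight=0.5))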
import json

from spinn_machine.json_utils import to_json, from_json_path
import pyNN.spiNNaker as sim

n_boards = 480
n_chips_required = 0 - n_boards
sim.setup(timestep=1.0, n_chips_required=n_chips_required)
machine = sim.get_machine()
sim.end()
print("Got machine")

# round-trip the machine description through JSON and check it is stable
jpath = "{}board.json".format(n_boards)
j_machine = to_json(machine)
with open(jpath, "w") as f:
    json.dump(j_machine, f)
machine2 = from_json_path(jpath)
j_machine2 = to_json(machine2)
testpath = "test.json"
with open(testpath, "w") as f:
    json.dump(j_machine2, f)
assert j_machine == j_machine2
import pyNN.spiNNaker as sim

sim.setup()
p1 = sim.Population(3, sim.SpikeSourceArray, {"spike_times": [1.0, 2.0, 3.0]})
p2 = sim.Population(3, sim.SpikeSourceArray,
                    {"spike_times": [[10.0], [20.0], [30.0]]})
p3 = sim.Population(4, sim.IF_cond_exp, {})
sim.Projection(p2, p3, sim.FromListConnector([
    (0, 0, 0.1, 1.0), (1, 1, 0.1, 1.0), (2, 2, 0.1, 1.0)]))
# sim.Projection(p1, p3, sim.FromListConnector([(0, 3, 0.1, 1.0)]))
# works if this line is added
sim.run(100.0)
    return spike_times_1, spike_times_2


# millisecond * seconds * minutes * hours in a day
# runtime = 1000*60*60*24
runtime = 1000  # *60*10  # 10 min
stimtime = 500
weight_to_spike = 8.
timestep = 1.
min_delay = 1.
max_delay = 20.

sim.setup(timestep=timestep, min_delay=min_delay, max_delay=max_delay)

max_weight = 10.
min_weight = 0.0
a_plus = 0.1
a_minus = 0.12
tau_plus = 20.
tau_minus = 20.
num_exc = 800
num_inh = 200
total_neurons = num_exc + num_inh
max_conn_per_neuron = 100
conn_prob = 0.1  # float(max_conn_per_neuron)/float(total_neurons)
cell_type = sim.IZK_curr_exp
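# IZK_curr_exp is the legacy sPyNNaker name for the Izhikevich model. The
# snippet is truncated before its parameters appear; a plausible excitatory
# parameter set, using Izhikevich's canonical regular-spiking values (an
# assumption, not taken from the original):
#
# exc_params = {'a': 0.02, 'b': 0.2, 'c': -65, 'd': 8,
#               'v_init': -65, 'u_init': -65 * 0.2}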
# Population parameters
model = sim.IF_cond_exp
cell_params = {'cm': 0.25,  # nF
               'i_offset': 0.0,
               'tau_m': 10.0,
               'tau_refrac': 2.0,
               'tau_syn_E': 2.5,
               'tau_syn_I': 2.5,
               'v_reset': -70.0,
               'v_rest': -65.0,
               'v_thresh': -55.4}

# SpiNNaker setup
sim.setup(timestep=0.1, min_delay=0.1, max_delay=1.0)

# -------------------------------------------------------------------
# Experiment loop
# -------------------------------------------------------------------
projections = []
sim_time = 0
for t in delta_t:
    # Calculate the phase of the input spike trains;
    # if M.A.D., take the dendritic delay into account
    if mad:
        # Pre after post
        if t > 0:
            post_phase = 0
            pre_phase = t + 1
        # Post after pre
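#         The snippet is truncated at this point; a plausible else-branch
#         mirroring the t > 0 case above (an assumed continuation, not taken
#         from the original):
#             else:
#                 post_phase = -t
#                 pre_phase = 1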
LOGDEBUG = 1
LOGINFO = 2
LOGLEVEL = LOGINFO

#### DEFAULTS
tau_syn = 100           # default value
neurons_per_core = 100  # default value

#### COUNTERS
n_outs = 0  # output counter for placing NEF_out populations in chip 0,0

# Set up the connection with the DB and the number of cells
spinn.setup(db_name=os.path.abspath('%s/nengo_model.sqlite'
                                    % pacman.BINARIES_DIRECTORY))
spinn.get_db().set_number_of_neurons_per_core(
    'NEF_out_2d', neurons_per_core * 2)  # this will set one population per core
# spinn.get_db().set_number_of_neurons_per_core(
#     'IF_curr_exp_32', neurons_per_core)  # this will set one population per core
spinn.get_db().set_number_of_neurons_per_core(
    'IF_NEF_input_2d', neurons_per_core * 2)  # this will set one population per core


# PACMAN INTERFACES
class general_cell():
    """
    PACMAN interface class giving a cell type with parameters, class, size,
    label, id and mapping constraints
    """
    def __init__(self, cellname):
        self.__name__ = cellname
class MySpiNNakerLinkDevice(ApplicationSpiNNakerLinkVertex):
    def __init__(self, n_neurons, spinnaker_link_id, label=None):
        ApplicationSpiNNakerLinkVertex.__init__(
            self, n_neurons, spinnaker_link_id, label=label)


class MySpiNNakerLinkDeviceDataHolder(DataHolder):
    def __init__(self, spinnaker_link_id, label=None):
        DataHolder.__init__(
            self, {"spinnaker_link_id": spinnaker_link_id, "label": label})

    @staticmethod
    def build_model():
        return MySpiNNakerLinkDevice


p.setup(1.0)

poisson = p.Population(1, p.SpikeSourcePoisson(rate=100))
device = p.Population(1, MySpiNNakerLinkDeviceDataHolder(spinnaker_link_id=1))

p.external_devices.activate_live_output_to(poisson, device)

p.run(100)
p.end()
        setattr(population, variable, s[label][variable])
    if label != 'InputLayer':
        population.set(i_offset=s[label]['i_offset'])
    layers.append(population)

    return layers, s['labels'], n_neurons


# run for every participant and every fold
accs_all = []
stds_all = []
for part in participants:
    print("running participant: " + str(part))
    allfoldacc = []
    for fold in np.arange(0, 5):
        pynn.setup(dt, min_delay=dt)
        if spin:
            pynn.set_number_of_neurons_per_core(pynn.IF_curr_exp, 64)
        path = "models/subject_" + str(part) + "/fold" + str(fold)

        # load data
        test_data = np.load(path + "/x_test.npz")['arr_0']
        test_labels = np.load(path + "/y_test.npz")['arr_0']
        pred_labels = []
        num_test = len(test_data)

        # create network
        network, labels, n_neurons = load_assembly(
            path, "tsfold" + str(fold) + "_nest", pynn)
# Get the number of cores and use this to make the simulation
n_cores = 0
for chip in machine.chips:
    for processor in chip.processors:
        if not processor.is_monitor:
            n_cores += 1
n_pops = int(math.floor(n_cores / 2.0))
print "n_cores =", n_cores, "n_pops =", n_pops
spike_times = range(0, n_pops * spike_gap, spike_gap)
run_time = (n_pops + 1) * spike_gap + 1

for do_injector_first in [True, False]:
    # Set up for execution
    p.setup(1.0, machine=hostname)
    injectors = list()
    for i in range(n_pops):
        injector = None
        (x1, y1, p1) = cores[i * 2]
        (x2, y2, p2) = cores[(i * 2) + 1]
        if do_injector_first:
            injector = create_injector(i, x1, y1, p1)
        else:
            injector = create_injector(i, x2, y2, p2)
        injectors.append(injector)
    populations = list()
    for i in range(n_pops):
        pop = None
cm = cm * area * 1000        # convert to nF
Rm = 1e-6 / (g_leak * area)  # membrane resistance in MΩ
assert tau_m == cm * Rm      # just to check

n_exc = int(round((n * r_ei / (1 + r_ei))))  # number of excitatory cells
n_inh = n - n_exc                            # number of inhibitory cells

celltype = sim.IF_curr_exp
w_exc = 1e-3 * Gexc * (Erev_exc - v_mean)  # (nA) weight of excitatory synapses
w_inh = 1e-3 * Ginh * (Erev_inh - v_mean)  # (nA)
assert w_exc > 0
assert w_inh < 0

# === Build the network ========================================================

extra = {'threads': threads, 'label': 'VA'}
node_id = sim.setup(timestep=dt, min_delay=delay, max_delay=delay, **extra)
print "%s Initialising the simulator with %d thread(s)..." % (
    node_id, extra['threads'])

cell_params = {
    'tau_m': tau_m, 'tau_syn_E': tau_exc, 'tau_syn_I': tau_inh,
    'v_rest': E_leak, 'v_reset': v_reset, 'v_thresh': v_thresh,
    'cm': cm, 'tau_refrac': t_refrac}

print "%s Creating cell populations..." % node_id
exc_cells = sim.Population(n_exc, celltype, cell_params,
                           label="Excitatory_Cells")
inh_cells = sim.Population(n_inh, celltype, cell_params,
                           label="Inhibitory_Cells")

print "%s Initialising membrane potential to random values..." % node_id
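# The snippet stops at the initialisation message. In the standard PyNN
# VAbenchmark script this is followed by a randomised membrane-potential
# initialisation, roughly as below (an assumed continuation; rngseed and
# parallel_safe are hypothetical names, not from the original):
#
# rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe)
# uniformDistr = RandomDistribution('uniform', [v_reset, v_thresh], rng=rng)
# exc_cells.initialize('v', uniformDistr)
# inh_cells.initialize('v', uniformDistr)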
This example requires that the NeuroTools package is installed
(http://neuralensemble.org/trac/NeuroTools)

Authors : Catherine Wacongne < *****@*****.** >
          Xavier Lagorce < *****@*****.** >

April 2013
"""
import numpy, pylab, random, sys
import NeuroTools.signals as nt
import pyNN.spiNNaker as sim

# SpiNNaker setup
sim.setup(timestep=1.0, min_delay=1.0, max_delay=10.0,
          db_name='stdp_example.sqlite')

# +-------------------------------------------------------------------+
# | General Parameters                                                 |
# +-------------------------------------------------------------------+

# Population parameters
model = sim.IF_cond_exp
cell_params = {
    'cm': 0.281,        # membrane capacitance nF
    'e_rev_E': 0.0,     # excitatory reversal potential in mV
    'e_rev_I': -80.0,   # inhibitory reversal potential in mV
    'i_offset': 0.0,    # offset current
    'tau_m': 9.3667,    # membrane time constant
    'tau_refrac': 0.1,  # absolute refractory period
    'tau_syn_E': 5.0,   # excitatory synaptic time constant
""" Synfirechain-like example Expected results in .. figure:: ./examples/results/synfire_chain_lif.png """ #!/usr/bin/python import pyNN.spiNNaker as p import numpy, pylab p.setup(timestep=1.0, min_delay = 1.0, max_delay = 8.0, db_name='synfire.sqlite') n_pop = 16 # number of populations nNeurons = 10 # number of neurons in each population p.get_db().set_number_of_neurons_per_core('IF_curr_exp', nNeurons) # this will set one population per core # random distributions rng = p.NumpyRNG(seed=28374) delay_distr = p.RandomDistribution('uniform', [1,10], rng=rng) weight_distr = p.RandomDistribution('uniform', [0,2], rng=rng) v_distr = p.RandomDistribution('uniform', [-55,-95], rng) cell_params_lif_in = { 'tau_m' : 32, 'v_init' : -80, 'v_rest' : -75, 'v_reset' : -95,
""" Synfirechain-like example Expected results in .. figure:: ./examples/results/synfire_chain_lif.png """ #!/usr/bin/python import pyNN.spiNNaker as p import numpy, pylab p.setup(timestep=1.0, min_delay=1.0, max_delay=8.0, db_name='synfire.sqlite') n_pop = 16 # number of populations nNeurons = 10 # number of neurons in each population p.get_db().set_number_of_neurons_per_core( 'IF_curr_exp', nNeurons) # this will set one population per core # random distributions rng = p.NumpyRNG(seed=28374) delay_distr = p.RandomDistribution('uniform', [1, 10], rng=rng) weight_distr = p.RandomDistribution('uniform', [0, 2], rng=rng) v_distr = p.RandomDistribution('uniform', [-55, -95], rng) cell_params_lif_in = { 'tau_m': 32, 'v_init': -80, 'v_rest': -75, 'v_reset': -95, 'v_thresh': -55,
$ ./stdp_example

This example requires that the NeuroTools package is installed
(http://neuralensemble.org/trac/NeuroTools)

Authors : Catherine Wacongne < *****@*****.** >
          Xavier Lagorce < *****@*****.** >

April 2013
"""
import pylab
import pyNN.spiNNaker as sim

# SpiNNaker setup
sim.setup(timestep=1.0, min_delay=1.0, max_delay=10.0)

# +-------------------------------------------------------------------+
# | General Parameters                                                 |
# +-------------------------------------------------------------------+

# Population parameters
model = sim.IF_curr_exp
cell_params = {'cm': 0.25,
               'i_offset': 0.0,
               'tau_m': 20.0,
               'tau_refrac': 2.0,
               'tau_syn_E': 5.0,
               'tau_syn_I': 5.0,
               'v_reset': -70.0,
Each population has 256 neurons (4 neurons x 64 channels).
Neurons in each population (ear) are numbered from 0 to 255, where::

    id = (channel * 4) + neuron

.. moduleauthor:: Francesco Galluppi, SpiNNaker Project, [email protected]
"""
from pyNN.utility import get_script_args
from pyNN.errors import RecordingError
import pyNN.spiNNaker as p

p.setup(timestep=1.0, min_delay=1.0, max_delay=10.0,
        db_name='cochlea_example.sqlite')

nNeurons = 4 * 64  # 4 neurons and 64 channels per ear

p.get_db().set_number_of_neurons_per_core(
    'IF_curr_exp', nNeurons)  # this will set 256 neurons per core

cell_params = {
    'i_offset': .1,
    'tau_refrac': 3.0,
    'v_rest': -65.0,
    'v_thresh': -51.0,
    'tau_syn_E': 2.0,
    'tau_syn_I': 5.0,
    'v_reset': -70.0,
    'e_rev_E': 0.,
    'e_rev_I': -80.}

left_cochlea_ear = p.Population(
    nNeurons, p.ProxyNeuron, {'x_source': 254, 'y_source': 254},
    label='left_cochlea_ear')
left_cochlea_ear.set_mapping_constraint({'x': 0, 'y': 0})
left_cochlea_ear.record()  # this should record spikes from the cochlea
import matplotlib.pyplot as plt
import matplotlib.gridspec as gs
import numpy as np
import os
# import pyNN.nest as pynn
import pyNN.spiNNaker as pynn

# set sim parameters
sim_time = 150
dt = 0.1
refrac = 0
start_test = 78
end_test = 84
print("trials from " + str(start_test) + " to " + str(end_test))

pynn.setup(dt)
pynn.set_number_of_neurons_per_core(pynn.IF_curr_exp, 64)
weight_scale = 1
rescale_fac = 1000 / (1000 * dt)

# load data
test_data = np.load("x_test.npz")['arr_0'][start_test:end_test]
test_labels = np.load("y_test.npz")['arr_0'][start_test:end_test]
pred_labels = []

# create network
network = []

# cell defaults
""" Synfirechain-like example """ import pyNN.spiNNaker as p import pylab import numpy p.setup(timestep=1.0, min_delay=1.0, max_delay=144.0) nNeurons = 200 # number of neurons in each population p.set_number_of_neurons_per_core("IF_curr_exp", nNeurons / 2) runtime = 1000 cell_params_lif = {'cm': 0.25, 'i_offset': 0.0, 'tau_m': 20.0, 'tau_refrac': 2.0, 'tau_syn_E': 5.0, 'tau_syn_I': 5.0, 'v_reset': -70.0, 'v_rest': -65.0, 'v_thresh': -50.0 } populations = list() projections = list() weight_to_spike = 2.0 delay = 17 loopConnections = list() for i in range(0, nNeurons):
def run_sim(self):
    """ Sets up and runs the simulation """
    num_neurons = 1471  # total neurons in network
    num_inputs = 14     # number of neurons considered inputs
    num_runs = 1        # number of times to loop the learning
    num_samples = 1     # number of samples to learn
    sim_time = 1000.0   # time to run sim for
    inhibitory_split = 0.2
    connection_probability_factor = 0.02
    plot_spikes = True
    save_figures = True
    show_figures = True
    sim_start_time = strftime("%Y-%m-%d_%H:%M")

    cell_params_lif = {'cm': 0.25, 'i_offset': 0.0, 'tau_m': 10.0,
                       'tau_refrac': 2.0, 'tau_syn_E': 3.0, 'tau_syn_I': 3.0,
                       'v_reset': -65.0, 'v_rest': -65.0, 'v_thresh': -50.0}

    # Create the 3d structure of the NeuCube based on the user's given structure file
    network_structure = NetworkStructure()
    network_structure.load_structure_file()
    network_structure.load_input_location_file()
    # Calculate the inter-neuron distance to be used in the small world connections
    network_structure.calculate_distances()
    # Generate two connection matrices for excitatory and inhibitory neurons
    # based on the defined split
    network_structure.calculate_connection_matrix(inhibitory_split,
                                                  connection_probability_factor)
    # Get these lists to be used when connecting the neurons later
    excitatory_connection_list = network_structure.get_excitatory_connection_list()
    inhibitory_connection_list = network_structure.get_inhibitory_connection_list()
    # Choose the correct neurons to connect them to, based on a-priori knowledge
    # of the data source -- e.g. EEG to 10-20 locations, fMRI to voxel locations
    input_neuron_indexes = network_structure.find_input_neurons()
    # Make the input connections based on this new list
    input_weight = 4.0
    input_connection_list = []
    for index, input_neuron_index in enumerate(input_neuron_indexes):
        input_connection_list.append((index, input_neuron_index, input_weight, 0))

    for run_number in xrange(num_runs):
        excitatory_weights = []
        inhibitory_weights = []
        for sample_number in xrange(num_samples):
            # At the moment, with the limitations of the SpiNNaker hardware, we
            # have to reinstantiate EVERYTHING each run. In future there will be
            # some form of repetition added, where the structure stays in memory
            # on the SpiNNaker and only the input spikes need to be updated.
            data_prefix = sim_start_time + "_r" + str(run_number + 1) + \
                "-s" + str(sample_number + 1)

            # Set up the hardware - min_delay should never be less than the
            # timestep; timestep should = 1.0 (ms) for normal realtime applications
            p.setup(timestep=1.0, min_delay=1.0)
            p.set_number_of_neurons_per_core("IF_curr_exp", 100)

            # Create a population of neurons for the reservoir
            neurons = p.Population(num_neurons, p.IF_curr_exp, cell_params_lif,
                                   label="Reservoir")

            # Setup excitatory STDP
            timing_rule_ex = p.SpikePairRule(tau_plus=20.0, tau_minus=20.0)
            weight_rule_ex = p.AdditiveWeightDependence(
                w_min=0.1, w_max=1.0, A_plus=0.02, A_minus=0.02)
            stdp_model_ex = p.STDPMechanism(timing_dependence=timing_rule_ex,
                                            weight_dependence=weight_rule_ex)
            # Setup inhibitory STDP
            timing_rule_inh = p.SpikePairRule(tau_plus=20.0, tau_minus=20.0)
            weight_rule_inh = p.AdditiveWeightDependence(
                w_min=0.0, w_max=0.6, A_plus=0.02, A_minus=0.02)
            stdp_model_inh = p.STDPMechanism(timing_dependence=timing_rule_inh,
                                             weight_dependence=weight_rule_inh)

            # record the spikes from that population
            neurons.record('spikes')

            # Generate a population of SpikeSourceArrays containing the encoded
            # input spike data, e.g.:
            # spike_sources = p.Population(14, p.SpikeSourceArray,
            #                              {'spike_times': [[]]})
            # For the moment, cheat and just use Poisson trains as there is no
            # real data to hand
            spike_sources = p.Population(num_inputs, p.SpikeSourcePoisson,
                                         {'rate': rand.randint(20, 80)},
                                         label="Poisson_pop_E")

            # Connect the input spike sources with the "input" neurons
            connected_inputs = p.Projection(
                spike_sources, neurons, p.FromListConnector(input_connection_list))

            # If we have weights saved/recorded from a previous run of this
            # network, load them into the structure.
            # population.set(weights=weights_list) and
            # population.setWeights(weight_list) are not supported in SpiNNaker
            # at the moment, so we have to do this manually.
            if excitatory_weights and inhibitory_weights:
                for index, ex_connection in enumerate(excitatory_connection_list):
                    ex_connection[2] = excitatory_weights[index]
                for index, in_connection in enumerate(inhibitory_connection_list):
                    in_connection[2] = inhibitory_weights[index]

            # Setup the connectors
            excitatory_connector = p.FromListConnector(excitatory_connection_list)
            inhibitory_connector = p.FromListConnector(inhibitory_connection_list)

            # Connect the excitatory and inhibitory neuron populations
            connected_excitatory_neurons = p.Projection(
                neurons, neurons, excitatory_connector,
                synapse_dynamics=p.SynapseDynamics(slow=stdp_model_ex),
                target="excitatory")
            connected_inhibitory_neurons = p.Projection(
                neurons, neurons, inhibitory_connector,
                synapse_dynamics=p.SynapseDynamics(slow=stdp_model_inh),
                target="inhibitory")

            # Set up recording the spike trains of all the neurons
            neurons.record()
            spike_sources.record()

            # Run the actual simulation
            p.run(sim_time)

            # Save the output spikes
            spikes_out = neurons.getSpikes(compatible_output=True)
            input_spikes_out = spike_sources.getSpikes(compatible_output=True)
            # Get the synaptic weights of all the neurons
            excitatory_weights = connected_excitatory_neurons.getWeights()
            inhibitory_weights = connected_inhibitory_neurons.getWeights()

            # When we're all done, clean up
            p.end()

            # Make some plots, save them if required. Check whether you need to
            # either save or show them, because if not there's no point wasting
            # time plotting things nobody will ever see.
            if plot_spikes and (save_figures or show_figures):
                plot = Plot(save_figures, data_prefix)
                # Plot the 3D structure of the network
                plot.plot_structure(network_structure.get_positions(),
                                    figure_number=0)
                # Plot the spikes
                plot.plot_spike_raster(spikes_out, sim_time, num_neurons,
                                       figure_number=1)
                # Plot the weights
                plot.plot_both_weights(excitatory_weights, inhibitory_weights,
                                       figure_number=2)
                # If we want to show the figures, show them now, otherwise move on
                if show_figures:
                    # Show them all at once
                    plot.show_plots()
                plot.clear_figures()
                plot = None
import pyNN.spiNNaker as p
import retina_lib

input_size = 128  # size of each population
subsample_size = 32
runtime = 60

# Simulation Setup
p.setup(timestep=1.0, min_delay=1.0, max_delay=11.0)
# Will add some extra parameters for the spinnPredef.ini in here
p.set_number_of_neurons_per_core("IF_curr_exp", 128)  # one population per core

cell_params = {
    "tau_m": 64,
    "i_offset": 0,
    "v_rest": -75,
    "v_reset": -95,
    "v_thresh": -40,
    "tau_syn_E": 15,
    "tau_syn_I": 15,
    "tau_refrac": 2,
}

# external stuff: population requirements
connected_chip_coords = {"x": 0, "y": 0}
virtual_chip_coords = {"x": 0, "y": 5}
link = 4

print "Creating input population: %d x %d" % (input_size, input_size)
#!/usr/bin/env python
import IPython
import pyNN.spiNNaker as p
from pylab import *

nn_pre = 1000
nn_post = 16
nn_teach = 16  # neuron count
timestep = 0.6
duration = 100 * 1000

p.setup(timestep=timestep, min_delay=timestep, max_delay=10.0)
# p.set_number_of_neurons_per_core("IF_cond_exp", 12)

cell_params = {
    "cm": 0.25,  # nF
    "i_offset": 0.0,
    "tau_m": 10.0,
    "tau_refrac": 2.0,
    "tau_syn_E": 2.5,
    "tau_syn_I": 2.5,
    "v_reset": -70.0,
    "v_rest": -65.0,
    "v_thresh": -54.4,
}

cellparams_pclayer = {
    "tau_refrac": 2.58198889747,
    "tau_m": 40.343576523,
    "e_rev_E": 0.0,
    "cm": 0.645497224368,
    "e_rev_I": -80.0,
def test_snn(randomness=False,
             data_dir="data/X_test_zied.npy",
             cls_dir="data/y_test_zied.npy",
             data="load",   # pass data as argument
             cls="load"):   # pass labels as argument

    ###########################################################################
    ## Function Definitions
    ###########################################################################
    def gaussian(x, mu, sig):
        return np.float16(np.exp(-np.power(x - mu, 2.) / (2 * np.power(sig, 2.))))

    def calc_pop_code(feature, rng1, rng2, num):
        interval = np.float(rng2 - rng1) / num
        means = np.arange(rng1 + interval, rng2 + interval, interval)
        pop_code = [gaussian(feature, mu, 0.025) for mu in means]
        return pop_code

    def PoissonTimes2(t_str=0., t_end=100., rate=10., seed=1.):
        times = [t_str]
        rng = np.random.RandomState(seed=seed)
        cont = True
        while cont == True:
            t_next = np.floor(times[-1] + 1000. * next_spike_times(rng, rate))
            if t_next < t_end - 30:
                times.append(t_next[0])
            else:
                cont = False
        return times[1:]

    def PoissonTimes(t_str=0., t_end=100., rate=10., seed=1.):
        if rate > 0:
            interval = (t_end - t_str + 0.) / rate
            times = np.arange(t_str + 30, t_end - 40, interval)
            return list(times)
        else:
            return []

    def next_spike_times(rng, rate):
        return -np.log(1.0 - rng.rand(1)) / rate

    def ismember(a, b):
        b = [b]
        bind = {}
        for i, elt in enumerate(b):
            if elt not in bind:
                bind[elt] = i
        aa = [bind.get(itm, -1) for itm in a]
        return sum(np.array(aa) + 1.)

    ###########################################################################
    ## Parameters
    ###########################################################################
    # Load parameters
    parameters = np.load("output_files/parameters1.npy")
    parameters = parameters.item()
    # Load test data
    if data == "load" and cls == "load":
        data = np.load(data_dir)
        cls = np.load(cls_dir)
    # Simulation parameters
    trial_num = parameters["trial_num"]  # how many samples (trials) from data will be presented
    n_trials = len(cls)  # total trials
    time_int_trials = parameters["time_int_trials"]  # (ms) time to present each trial
    SIM_TIME = n_trials * time_int_trials  # total simulation time (ms)
    ts = parameters["ts"]  # timestep of SpiNNaker (ms)
    min_del = ts
    max_del = 144 * ts
    p.setup(timestep=ts, min_delay=min_del, max_delay=max_del)

    ## Neuron numbers
    n_feature = parameters["n_feature"]  # number of features
    n_pop = parameters["n_pop"]          # number of neurons in one population
    n_cl = parameters["n_cl"]            # number of classes at the output

    ## Connection parameters
    # Weights
    wei_src_enc = parameters["wei_src_enc"]    # from Source Array at input to Encoding Layer (Exc)
    wei_enc_filt = parameters["wei_enc_filt"]  # from Encoding Layer to Filtering Layer Exc neurons (Exc)
    wei_filt_inh = parameters["wei_filt_inh"]  # from Filtering Layer Inh neurons to Exc neurons (Inh)
    wei_cls_exc = parameters["wei_cls_exc"]    # from Output Layer Exc neurons to Inh neurons (Exc)
    wei_cls_inh = parameters["wei_cls_inh"]    # from Output Layer Inh neurons to Exc neurons (Inh)
    wei_noise_poi = parameters["wei_noise_poi"]
    # Delays
    del_src_enc = np.load("output_files/parameters2.npy")
    del_enc_filt = parameters["del_enc_filt"]
    del_init_stdp = parameters["del_init_stdp"]
    del_cls_exc = parameters["del_cls_exc"]
    del_cls_inh = parameters["del_cls_inh"]
    del_noise_poi = parameters["del_noise_poi"]
    # Firing rates
    noise_poi_rate = parameters["noise_poi_rate"]
    max_fr_input = parameters["max_fr_input"]  # maximum firing rate at the input layer
    max_fr_rate_output = parameters["max_fr_rate_output"]  # maximum firing rate at output (supervisory signal)

    ## Connection probabilities
    prob_filt_inh = parameters["prob_filt_inh"]  # prob of inhibitory connections at Filtering Layer
    prob_stdp = parameters["prob_stdp"]          # probability of STDP connections
    prob_output_inh = parameters["prob_output_inh"]  # prob of inhibitory connections at Output Layer
    prob_noise_poi_conn = parameters["prob_noise_poi_conn"]

    ## STDP parameters
    tau_pl = parameters["tau_pl"]
    tau_min = tau_pl
    stdp_w_max = parameters["stdp_w_max"]
    stdp_w_min = parameters["stdp_w_min"]
    stdp_A_pl = parameters["stdp_A_pl"]
    stdp_A_min = -stdp_A_pl  # minus in order to get a symmetric curve

    ## Neuron parameters
    cell_params_lif = {'cm': 1.,
                       'i_offset': 0.0,
                       'tau_m': 20.,
                       'tau_refrac': 2.0,
                       'tau_syn_E': 5.0,
                       'tau_syn_I': 5.0,
                       'v_reset': -70.0,
                       'v_rest': -65.0,
                       'v_thresh': -65.0
                       }

    ###########################################################################
    ## Data Extraction
    ###########################################################################
    ## Extract feature data
    scale_data = parameters["scale_data"]  # scale features into [0-scale_data] range
    r, c = np.shape(data)
    # Threshold (to keep spike amplitudes in range)
    thr_data_plus = parameters["thr_data_plus"]
    thr_data_minus = parameters["thr_data_minus"]
    data_rates = np.reshape(data, (1, r * c))[0]
    # Shift and normalize the data
    dd2 = np.array(data_rates) - min(data_rates)
    dd2 = dd2 / max(dd2) * 2
    new_data_rates = []
    for r in dd2:
        new_data_rates += calc_pop_code(r, 0., scale_data,
                                        n_feature / (n_pop + 0.0))
    data_rates = list(max_fr_input * np.array(new_data_rates))
    ## Extract class data
    cls = cls.reshape(len(cls), 1)
    r_cl, c_cl = np.shape(cls)
    cls = list(np.reshape(cls, (1, r_cl * c_cl))[0])
    outputs = cls[:n_trials]
    poi_rate = data_rates[:n_feature * n_trials]
    t1 = 0
    t2 = int(t1 + n_trials)
    outputs = cls[t1:t2]
    poi_rate = data_rates[t1 * n_feature:n_feature * t2]

    ###########################################################################
    ## Create populations for different layers
    ###########################################################################
    poi_layer = []
    enc_layer = []
    filt_layer_exc = []
    out_layer_exc = []
    out_layer_inh = []

    # Calculate Poisson spike times for features
    spike_times = [[] for i in range(n_feature)]
    for i in range(n_trials):
        t_st = i * time_int_trials
        t_end = t_st + time_int_trials
        ind = i * n_feature
        for j in range(n_feature):
            times = PoissonTimes(t_st, t_end, poi_rate[ind + j],
                                 np.random.randint(100))
            for t in times:
                spike_times[j].append(t)

    if randomness == True:
        # if True: calculate "spike_times" (randomly) anew
        # if False: load previously saved "spike_times"
        np.save('output_files/spike_times_test.npy', spike_times)
    else:
        spike_times = np.load('output_files/spike_times_test.npy')

    # Spike source of input layer
    spike_source = p.Population(n_feature, p.SpikeSourceArray,
                                {'spike_times': spike_times},
                                label='spike_source')
    enc_layer = p.Population(n_feature * n_pop, p.IF_curr_exp,
                             cell_params_lif, label='enc_layer')
    filt_layer = p.Population(n_feature * n_pop, p.IF_curr_exp,
                              cell_params_lif, label='filt_layer')
    for i in range(n_cl):
        out_layer_exc.append(p.Population(n_pop, p.IF_curr_exp, cell_params_lif,
                                          label='out_layer_exc{}'.format(i)))
        out_layer_inh.append(p.Population(n_pop, p.IF_curr_exp, cell_params_lif,
                                          label='out_layer_inh{}'.format(i)))
        out_layer_exc[i].record()
    poisson_input = p.Population(n_pop * 2, p.SpikeSourcePoisson,
                                 {"rate": noise_poi_rate})
    enc_layer.record()
    filt_layer.record()

    ###########################################################################
    ## Projections
    ###########################################################################
    ## Connection list from Spike Source Array to Encoding Layer
    conn_inp_enc = np.load("output_files/conn_inp_enc.npy")
    ## Connection list for Filtering Layer inhibitory
    conn_filt_inh = np.load("output_files/conn_filt_inh.npy")
    ## STDP connection list
    conn_stdp_list = np.load("output_files/conn_stdp_list.npy")
    diff_ind = np.load("output_files/diff_ind_filt.npy")
    diff_ind2 = np.load("output_files/diff_ind_filt2.npy")
    diff_thr2 = np.load("output_files/diff_thr2.npy")
    c1 = 0
    for cls_list in conn_stdp_list:
        c2 = 0
        cls_wei = np.load("output_files/stdp_weights{}.npy".format(c1))
        mx = max(cls_wei)
        for conn in cls_list:
            if (ismember(diff_ind2, conn[0]) and
                    np.sign(c1 - 0.5) * np.sign(diff_thr2[int(conn[0])]) == -1.):
                conn[2] = 0.08
            c2 += 1
        c1 += 1
    conn_stdp_list = list(conn_stdp_list)
    ## Output Layer inhibitory connection list
    conn_output_inh = np.load("output_files/conn_output_inh.npy")

    ## Spike Source to Encoding Layer
    p.Projection(spike_source, enc_layer, p.FromListConnector(conn_inp_enc))
    ## Encoding Layer to Filtering Layer
    p.Projection(enc_layer, filt_layer,
                 p.OneToOneConnector(weights=wei_enc_filt, delays=del_enc_filt))
    ## Filtering Layer inhibitory
    p.Projection(filt_layer, filt_layer, p.FromListConnector(conn_filt_inh),
                 target="inhibitory")
    ## STDP connection between Filtering Layer and Output Layer
    stdp_proj = []
    for j in range(n_cl):
        stdp_proj.append(p.Projection(filt_layer, out_layer_exc[j],
                                      p.FromListConnector(conn_stdp_list[j])))
    ## Connection between Output Layer neurons
    c = 0
    for i in range(n_cl):
        p.Projection(out_layer_exc[i], out_layer_inh[i],
                     p.OneToOneConnector(weights=wei_cls_exc,
                                         delays=del_cls_exc))
        iter_array = [j for j in range(n_cl) if j != i]
        for j in iter_array:
            p.Projection(out_layer_inh[i], out_layer_exc[j],
                         p.FromListConnector(conn_output_inh[c]),
                         target="inhibitory")
            c += 1
    ## Noisy Poisson connection to Encoding Layer
    if randomness == True:
        # if True: connect noise to the network; if False: run without noise
        p.Projection(poisson_input, enc_layer,
                     p.FixedProbabilityConnector(p_connect=prob_noise_poi_conn,
                                                 weights=wei_noise_poi,
                                                 delays=del_noise_poi))

    ###########################################################################
    ## Simulation
    ###########################################################################
    p.run(SIM_TIME)
    Enc_Spikes = enc_layer.getSpikes()
    Filt_Exc_Spikes = filt_layer.getSpikes()
    Out_Spikes = [[] for i in range(n_cl)]
    for i in range(n_cl):
        Out_Spikes[i] = out_layer_exc[i].getSpikes()
    p.end()

    ###########################################################################
    ## Plot
    ###########################################################################
    ## Plot 1: Encoding Layer raster (disabled)
    if 0:
        pylab.figure()
        pylab.xlabel('Time (ms)')
        pylab.ylabel('Neuron ID')
        pylab.title('Encoding Layer Raster Plot')
        pylab.hold(True)
        pylab.plot([i[1] for i in Enc_Spikes], [i[0] for i in Enc_Spikes], ".b")
        pylab.hold(False)
        pylab.show()

    ## Plot 2-1: Filtering Layer raster (disabled)
    if 0:
        pylab.figure()
        pylab.xlabel('Time (ms)')
        pylab.ylabel('Neuron ID')
        pylab.title('Filtering Layer Raster Plot')
        pylab.plot([i[1] for i in Filt_Exc_Spikes],
                   [i[0] for i in Filt_Exc_Spikes], ".b")
        pylab.show()

    ## Plot 2-2: Filtering Layer raster with trial boundaries
    pylab.figure()
    pylab.xlabel('Time (ms)')
    pylab.ylabel('Neuron ID')
    pylab.title('Filtering Layer Raster Plot')
    pylab.hold(True)
    pylab.plot([i[1] for i in Filt_Exc_Spikes],
               [i[0] for i in Filt_Exc_Spikes], ".b")
    time_ind = [i * time_int_trials for i in range(len(outputs))]
    for i in range(len(time_ind)):
        pylab.plot([time_ind[i], time_ind[i]], [0, 2000], "r")
    pylab.hold(False)
    pylab.show()

    ## Plot 3-1: Association Layer raster (disabled)
    if 0:
        pylab.figure()
        pylab.xlabel('Time (ms)')
        pylab.ylabel('Neuron ID')
        pylab.title('Association Layer Raster Plot\n'
                    'Test for Trial Numbers {}-{}'.format(t1, t2))
        pylab.hold(True)
        c = 0
        for array in Out_Spikes:
            pylab.plot([i[1] for i in array], [i[0] + c for i in array], ".b")
            c += 0.2
        time_ind = [i * time_int_trials for i in range(len(outputs))]
        for i in range(len(time_ind)):
            pylab.plot([time_ind[i], time_ind[i]], [0, 10], "r")
        pylab.hold(False)
        pylab.axis([-10, SIM_TIME + 100, -1, n_pop + 2])
        pylab.show()

    ## Plot 3-2: Association Layer raster, both classes
    pylab.figure()
    pylab.xlabel('Time (ms)')
    pylab.ylabel('Neuron ID')
    pylab.title('Association Layer Raster Plot\n'
                'Test for Samples {}-{}'.format(t1, t2))
    pylab.hold(True)
    pylab.plot([i[1] for i in Out_Spikes[0]],
               [i[0] for i in Out_Spikes[0]], ".b")
    pylab.plot([i[1] for i in Out_Spikes[1]],
               [i[0] + 0.2 for i in Out_Spikes[1]], ".r")
    time_ind = [i * time_int_trials for i in range(len(outputs))]
    for i in range(len(time_ind)):
        pylab.plot([time_ind[i], time_ind[i]], [0, n_pop], "k")
    pylab.hold(False)
    pylab.axis([-10, SIM_TIME + 100, -1, n_pop + 2])
    pylab.legend(["AN1", "AN2"])
    pylab.show()

    # Count output spikes per class per trial
    sum_output = [[] for i in range(n_cl)]
    for i in range(n_trials):
        t_st = i * time_int_trials
        t_end = t_st + time_int_trials
        for j in range(n_cl):
            sum_output[j].append(np.sum(
                [1 for n, t in Out_Spikes[j] if t >= t_st and t < t_end]))

    ## Plot 4: total spikes at each AN population per trial (disabled)
    if 0:
        pylab.figure()
        pylab.hold(True)
        pylab.plot(sum_output[0], "b^")
        pylab.plot(sum_output[1], "r^")
        ppp0 = np.array(sum_output[0])
        ppp1 = np.array(sum_output[1])
        out_cl0 = [i for i in range(len(outputs)) if outputs[i] == 0]
        out_cl1 = [i for i in range(len(outputs)) if outputs[i] == 1]
        pylab.plot(out_cl0, ppp0[out_cl0], "bs")
        pylab.plot(out_cl1, ppp1[out_cl1], "rs")
        pylab.hold(False)
        pylab.title("Total spikes at each AN population for each trial")
        pylab.xlabel("Trials")
        pylab.ylabel("Spike Count for Each Trial")
        pylab.legend(["Cls 0", "Cls 1",
                      "Actual Winner Cls 0", "Actual Winner Cls 1"])
        pylab.axis([-2, n_trials + 2, -4, max(max(sum_output)) + 30])
        pylab.show()

    ## Check classification rate
    s = np.array(sum_output)
    cl = np.floor((np.sign(s[1] - s[0]) + 1) / 2)
    r_cl = np.array(outputs)
    wrong = np.sum(np.abs(cl - r_cl))
    rate = (n_trials - wrong) / n_trials
    print("success rate: {}%".format(abs(rate) * 100.))
    print("cl:\n", cl)
    print("r_cl:\n", r_cl)

    ## Plot 5: trained STDP weights per class (disabled)
    if 0:
        pylab.figure()
        cf = 0.1
        pylab.hold(True)
        cls_wei0 = np.load("output_files/stdp_weights{}.npy".format(0))
        mx = max(cls_wei0)
        cls_wei0 = cf * cls_wei0 / mx
        cls_wei1 = np.load("output_files/stdp_weights{}.npy".format(1))
        mx = max(cls_wei1)
        cls_wei1 = cf * cls_wei1 / mx
        l = min(len(cls_wei0), len(cls_wei1))
        new_array0 = [cls_wei0[i] for i in range(l) if cls_wei0[i] > cls_wei1[i]]
        x0 = [i for i in range(l) if cls_wei0[i] > cls_wei1[i]]
        new_array1 = [cls_wei1[i] for i in range(l) if cls_wei1[i] > cls_wei0[i]]
        x1 = [i for i in range(l) if cls_wei1[i] > cls_wei0[i]]
        pylab.plot(x0, new_array0, "gx")
        pylab.plot(x1, new_array1, "bx")
        pylab.axis([-10, 2000, -0.1, 0.15])
        pylab.hold(False)
        pylab.show()

    ## Plot 7: total spiking activity at the Decomposition Layer per class (disabled)
    if 0:
        sum_filt = np.array([[0 for i in range(n_feature * n_pop)]
                             for j in range(n_cl)])
        for i in range(n_trials):
            t_st = i * time_int_trials
            t_end = t_st + time_int_trials
            cl = outputs[i]
            for n, t in Filt_Exc_Spikes:
                if t >= t_st and t < t_end:
                    sum_filt[int(cl), int(n)] = sum_filt[int(cl), int(n)] + 1
        a4 = sum_filt[0]
        b4 = sum_filt[1]
        pylab.figure()
        pylab.hold(True)
        pylab.plot(a4, "b.")
        pylab.plot(b4, "r.")
        pylab.xlabel('Neuron ID')
        pylab.ylabel('Total Firing Rates Through Trials')
        pylab.title("Total Spiking Activity of Neurons at Decomposition Layer "
                    "for Each Class")
        pylab.hold(False)
        pylab.legend(["Activity to AN1", "Activity to AN2"])
        pylab.show()

    return rate
tau_m = 20.      # (ms)
cm = 1.          # (uF/cm^2)
g_leak = 5e-5    # (S/cm^2)
E_leak = -60     # (mV)
v_thresh = -50.  # (mV)
v_reset = -60.   # (mV)
t_refrac = 10.   # (ms) (clamped at v_reset)
tau_exc = 5.     # (ms)
tau_inh = 10.    # (ms)

cell_params = {
    'tau_m': tau_m, 'tau_syn_E': tau_exc, 'tau_syn_I': tau_inh,
    'v_rest': E_leak, 'v_reset': v_reset, 'v_thresh': v_thresh,
    'cm': cm, 'tau_refrac': t_refrac}

############################### setup simulation
sim.setup(timestep=dt, min_delay=1.0)

# build populations
cell_type = sim.IF_cond_exp
num_of_cells = 10
layers = 10
populations = []
for i in range(layers):
    curr_pop = sim.Population(num_of_cells, cell_type, cell_params,
                              label="Population {0}".format(i))
    populations.append(curr_pop)
    curr_pop.initialize('v', v_reset)

if mode == "spikes":
    populations[-1].record()
else:
    populations[0].record_v()
import pyNN.spiNNaker as p

INJECTOR_LABEL = "injector"
RECEIVER_LABEL = "receiver"


# python callback invoked when spikes are received for a timer tick
def receive_spikes(label, time, neuron_ids):
    for neuron_id in neuron_ids:
        print("Received spike at time {} from {}-{}".format(
            time, label, neuron_id))


p.setup(timestep=1.0)

p1 = p.Population(1, p.IF_curr_exp(), label=RECEIVER_LABEL)
input_injector = p.Population(1, p.external_devices.SpikeInjector(),
                              label=INJECTOR_LABEL)
# set up the python live spike connection
live_spikes_connection = p.external_devices.SpynnakerLiveSpikesConnection(
    receive_labels=[RECEIVER_LABEL])
# register the python receiver with the live spike connection
live_spikes_connection.add_receive_callback(RECEIVER_LABEL, receive_spikes)

input_proj = p.Projection(input_injector, p1, p.OneToOneConnector(),
                          p.StaticSynapse(weight=5, delay=3))
p1.record(["spikes", "v"])
p.run(50)
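# A minimal sender-side sketch (an assumed addition, not part of the original
# snippet): to actually drive the injector above, a second live connection
# with send_labels can push spikes in from the host once the simulation starts.
#
# def send_spikes(label, sender):
#     sender.send_spike(label, 0)  # inject a spike into neuron 0
#
# sender_connection = p.external_devices.SpynnakerLiveSpikesConnection(
#     send_labels=[INJECTOR_LABEL])
# sender_connection.add_start_resume_callback(INJECTOR_LABEL, send_spikes)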
import pyNN.spiNNaker as p
from matplotlib import pylab

p.setup(timestep=1.0)
input_pop = p.Population(1, p.SpikeSourceArray,
                         cellparams={"spike_times": [0]}, label="input")
cell_params_lif = {'cm': 0.25,  # nF
                   'i_offset': 0.0,
                   'tau_m': 20.0,
                   'tau_refrac': 2.0,
                   'tau_syn_E': 5.0,
                   'tau_syn_I': 5.0,
                   'v_reset': -70.0,
                   'v_rest': -65.0,
                   'v_thresh': -50.0
                   }
pop = p.Population(2, p.IF_curr_exp, cellparams=cell_params_lif, label="pop")

connections = list()
connections.append(p.Projection(input_pop, pop, p.AllToAllConnector(
    weights=[0.3, 1.0], delays=[1, 17])))
connections.append(p.Projection(input_pop, pop, p.AllToAllConnector(
    weights=[1.0, 0.7], delays=[2, 15])))
connections.append(p.Projection(input_pop, pop, p.AllToAllConnector(
    weights=[0.7, 0.3], delays=[3, 33])))

pre_weights = list()
pre_delays = list()
for connection in connections:
    pre_weights.append(connection.getWeights())
    pre_delays.append(connection.getDelays())

p.run(100)

post_weights = list()
import pyNN.spiNNaker as p
import pylab
import numpy
from pyNN.random import RandomDistribution

p.setup(timestep=0.1)

n_neurons = 1000
n_exc = int(round(n_neurons * 0.8))
n_inh = int(round(n_neurons * 0.2))
weight_exc = 0.1
weight_inh = -5.0 * weight_exc

pop_exc = p.Population(n_exc, p.IF_curr_exp, {}, label="Excitatory")
pop_inh = p.Population(n_inh, p.IF_curr_exp, {}, label="Inhibitory")
stim_exc = p.Population(n_exc, p.SpikeSourcePoisson, {"rate": 1000.0},
                        label="Stim_Exc")
stim_inh = p.Population(n_inh, p.SpikeSourcePoisson, {"rate": 1000.0},
                        label="Stim_Inh")

delays_exc = RandomDistribution("normal", [1.5, 0.75], boundaries=(1.0, 14.4))
weights_exc = RandomDistribution("normal", [weight_exc, 0.1],
                                 boundaries=(0, numpy.inf))
conn_exc = p.FixedProbabilityConnector(0.1, weights=weights_exc,
                                       delays=delays_exc)
delays_inh = RandomDistribution("normal", [0.75, 0.375], boundaries=(1.0, 14.4))
weights_inh = RandomDistribution("normal", [weight_inh, 0.1],
                                 boundaries=(-numpy.inf, 0))
conn_inh = p.FixedProbabilityConnector(0.1, weights=weights_inh,
                                       delays=delays_inh)
p.Projection(pop_exc, pop_exc, conn_exc, target="excitatory")
p.Projection(pop_exc, pop_inh, conn_exc, target="excitatory")
p.Projection(pop_inh, pop_inh, conn_inh, target="inhibitory")
p.Projection(pop_inh, pop_exc, conn_inh, target="inhibitory")

conn_stim = p.OneToOneConnector(weights=weight_exc, delays=1.0)
p.Projection(stim_exc, pop_exc, conn_stim, target="excitatory")
p.Projection(stim_inh, pop_inh, conn_stim, target="excitatory")
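# A possible way to finish this balanced-random-network script (an assumed
# continuation, not from the original): record, run, and pull spikes back
# with the legacy getSpikes() API used elsewhere in these snippets.
#
# pop_exc.record()
# p.run(1000)
# exc_spikes = pop_exc.getSpikes(compatible_output=True)
# p.end()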
'''THIS CODE TRIES TO IMPLEMENT THE LAST APPLICATION FROM NESSLER'S STDP-EM PAPER'''
Neurons = int(raw_input('Enter the total No. of neurons (700) in the simulation: '))
n = int(raw_input('Enter No. of patterns you totally want (n): '))
pattern_gap = int(raw_input('Enter the duration after which the next pattern should appear (pattern_gap) 250: '))
k = int(raw_input('Enter the pattern length, for now give 50 (k): '))
weight_to_spike = 1.0

import pyNN.spiNNaker as p
import pylab
import pickle
import numpy as np

p.setup(timestep=1.0)  # runs using a 1.0 ms timestep

# Here the default parameters of the IF_curr_exp are used. In PyNN, neurons
# are declared as a population of a number of neurons with similar properties.
cell_params_lif = {
    'cm': 0.35,        # capacitance of the LIF neuron in nF
    'i_offset': 0.0,   # a base input current to add each timestep
    'tau_m': 4,        # the time constant of the RC circuit, in ms
    'tau_refrac': 1,   # the refractory period in ms
    'tau_syn_E': 1,    # the excitatory input current decay time constant
    'tau_syn_I': 10,   # the inhibitory input current decay time constant
    'v_reset': -70.6,  # the voltage to set the neuron at immediately after a spike
    'v_rest': -65,     # the ambient rest voltage of the neuron
    'v_thresh': -50.   # the threshold voltage at which the neuron will spike
}

# %%%%%%%%%%%% CONSTRUCTING THE PATTERN %%%%%%%%%%%%
b = pickle.load(open('/home/ruthvik/Desktop/spikefile_700_50ms_6Hz', 'rb'))
def train(spikeTimes, untrained_weights=None):
    organisedStim = {}
    labelSpikes = []
    #spikeTimes = generate_data()
    #labelSpikes[label] = [(input_len-1)*v_co+1, (input_len-1)*v_co*2+1, (input_len-1)*v_co*3+1]

    if untrained_weights is None:
        untrained_weights = RandomDistribution('uniform', low=wMin, high=wMaxInit).next(input_size * output_size)
        #untrained_weights = RandomDistribution('normal_clipped', mu=0.1, sigma=0.05, low=wMin, high=wMaxInit).next(input_size*output_size)
        untrained_weights = np.around(untrained_weights, 3)
        #saveWeights(untrained_weights, 'untrained_weightssupmodel1traj')
        print("init!")

    print "length untrained_weights :", len(untrained_weights)

    # Unflatten the weight vector into an input_size x output_size matrix
    if len(untrained_weights) > input_size:
        training_weights = [[0 for j in range(output_size)] for i in range(input_size)]
        k = 0
        for i in range(input_size):
            for j in range(output_size):
                training_weights[i][j] = untrained_weights[k]
                k += 1
    else:
        training_weights = untrained_weights

    connections = []
    for n_pre in range(input_size):
        for n_post in range(output_size):
            connections.append((n_pre, n_post, training_weights[n_pre][n_post], __delay__))

    runTime = int(max(max(spikeTimes)) / 3) + 100
    sim.setup(timestep=1)

    # Populations
    layer1 = sim.Population(input_size, sim.SpikeSourceArray,
                            {'spike_times': spikeTimes}, label='inputspikes')
    layer2 = sim.Population(output_size, sim.IF_curr_exp,
                            cellparams=cell_params_lif, label='outputspikes')
    #supsignal = sim.Population(output_size, sim.SpikeSourceArray,
    #                           {'spike_times': labelSpikes}, label='supersignal')

    # Learning rule
    stdp = sim.STDPMechanism(
        weight=untrained_weights,  # the initial value of the weights
        #delay="0.2 + 0.01*d",
        timing_dependence=sim.SpikePairRule(tau_plus=tauPlus, tau_minus=tauMinus,
                                            A_plus=aPlus, A_minus=aMinus),
        #weight_dependence=sim.MultiplicativeWeightDependence(w_min=wMin, w_max=wMax),
        weight_dependence=sim.AdditiveWeightDependence(w_min=wMin, w_max=wMax),
        dendritic_delay_fraction=1.0)

    # Projections
    #stdp_proj = sim.Projection(layer1, layer2, sim.FromListConnector(connections), synapse_type=stdp)
    stdp_proj = sim.Projection(layer1, layer2, sim.AllToAllConnector(), synapse_type=stdp)
    inhibitory_connections = sim.Projection(
        layer2, layer2, sim.AllToAllConnector(allow_self_connections=False),
        synapse_type=sim.StaticSynapse(weight=inhibWeight, delay=__delay__),
        receptor_type='inhibitory')
    #stim_proj = sim.Projection(supsignal, layer2, sim.OneToOneConnector(),
    #                           synapse_type=sim.StaticSynapse(weight=stimWeight, delay=__delay__))

    layer1.record(['spikes'])
    layer2.record(['v', 'spikes'])
    #supsignal.record(['spikes'])

    sim.run(runTime)

    print("Weights:{}".format(stdp_proj.get('weight', 'list')))
    weight_list = [stdp_proj.get('weight', 'list'),
                   stdp_proj.get('weight', format='list', with_address=False)]

    neo = layer2.get_data(["spikes", "v"])
    spikes = neo.segments[0].spiketrains
    v = neo.segments[0].filter(name='v')[0]
    neoinput = layer1.get_data(["spikes"])
    spikesinput = neoinput.segments[0].spiketrains
    #neostim = supsignal.get_data(["spikes"])
    #spikestim = neostim.segments[0].spiketrains

    plt.close('all')
    pplt.Figure(
        pplt.Panel(spikesinput, xticks=True, yticks=True, markersize=2,
                   xlim=(0, runTime), xlabel='(a) Spikes of Input Layer'),
        #pplt.Panel(spikestim, xticks=True, yticks=True, markersize=2,
        #           xlim=(0, runTime), xlabel='(c) Spikes of Supervised Layer'),
        pplt.Panel(spikes, xticks=True, xlabel="(b) Spikes of Output Layer",
                   yticks=True, markersize=2, xlim=(0, runTime)),
        pplt.Panel(v, ylabel="Membrane potential (mV)", xticks=True, yticks=True,
                   xlim=(0, runTime),
                   xlabel='(c) Membrane Potential of Output Layer\nTime (ms)'),
        title="Two Training",
        annotations="Twoway Training"
    ).save('SNN_DVS_un/plot_for_twoway/' + str(trylabel) + '_training.png')

    plt.close('all')
    plt.hist([weight_list[1][0:input_size],
              weight_list[1][input_size:input_size * 2],
              weight_list[1][input_size * 2:]],
             bins=20, label=['neuron 0', 'neuron 1', 'neuron 2'], range=(0, wMax))
    plt.title('weight distribution')
    plt.xlabel('Weight value')
    plt.ylabel('Weight count')
    #plt.show()

    sim.end()
    return weight_list[1]
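# A minimal usage sketch (assumption, not in the original): train() relies on
# module-level names (sim, pplt, input_size, wMin, ...). The values below are
# illustrative choices made only so the function can be exercised end to end;
# the output directory 'SNN_DVS_un/plot_for_twoway/' is assumed to exist.
import numpy as np
import pyNN.spiNNaker as sim
import pyNN.utility.plotting as pplt
import matplotlib.pyplot as plt
from pyNN.random import RandomDistribution

input_size, output_size = 1024, 3
wMin, wMax, wMaxInit = 0.0, 0.1, 0.05
tauPlus, tauMinus, aPlus, aMinus = 20.0, 20.0, 0.01, 0.012
inhibWeight, __delay__, trylabel = 0.5, 1.0, 0
cell_params_lif = {'cm': 0.25, 'tau_m': 20.0, 'v_rest': -65.0,
                   'v_reset': -70.0, 'v_thresh': -50.0}  # placeholder values

spikeTimes = [[10.0 * (i % 7) + 5.0] for i in range(input_size)]  # one spike per input
trained = train(spikeTimes)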
import pyNN.spiNNaker as pynn  # missing in the original fragment, needed below


# helper function for populations
def get_pops(n_in_pop, n_used_neuronblocks=7):
    l = []
    model_type = pynn.IF_curr_exp
    for _ in xrange(n_used_neuronblocks):
        pop = pynn.Population(n_in_pop, model_type, params)
        pop.record()
        l.append(pop)
    return l

pynn.setup(timestep=0.1)
duration = 1 * 3000  # ms
params = {"cm": 0.2, "v_reset": -70, "v_rest": -50, "v_thresh": -47}
n_in_pop = 12

# prepare populations
all_pops = []
all_pops.extend(get_pops(n_in_pop))
all_pops.extend(get_pops(n_in_pop))  # -> makes 14 populations in total

# synaptic weight, must be tuned for SpiNNaker
w_exc = 0.25
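# A minimal sketch (assumption, not from the original) of how the prepared
# populations might be wired with the excitatory weight w_exc: each population
# drives the next one all-to-all, forming a feed-forward chain.
for pre, post in zip(all_pops[:-1], all_pops[1:]):
    pynn.Projection(pre, post,
                    pynn.AllToAllConnector(weights=w_exc, delays=1.0),
                    target='excitatory')
pynn.run(duration)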
#!/usr/bin/env python
import IPython
import pyNN.spiNNaker as p
from pylab import *

# neuron counts
nn_pre = 1000
nn_post = 16
nn_teach = 16

timestep = 0.6
duration = 100 * 1000

p.setup(timestep=timestep, min_delay=timestep, max_delay=10.0)
#p.set_number_of_neurons_per_core("IF_cond_exp", 12)

cell_params = {
    'cm': 0.25,  # nF
    'i_offset': 0.0,
    'tau_m': 10.0,
    'tau_refrac': 2.0,
    'tau_syn_E': 2.5,
    'tau_syn_I': 2.5,
    'v_reset': -70.0,
    'v_rest': -65.0,
    'v_thresh': -54.4
}

cellparams_pclayer = {
    'tau_refrac': 2.58198889747,
    'tau_m': 40.343576523,
    'e_rev_E': 0.0,
    'cm': 0.645497224368,
    'e_rev_I': -80.0,
import pylab
import numpy
from numpy.random import randint
from numpy import where
import time
import os
import pyNN.spiNNaker as sim  # missing in the original fragment, needed below

runtime = 10000
weight_to_spike = 3.
timestep = 1.
min_delay = 1.
max_delay = 20.

sim.setup(timestep=timestep, min_delay=min_delay, max_delay=max_delay)

max_weight = 10.
min_weight = 0.000001
a_plus = 0.1
a_minus = 0.12
tau = 20.
conn_prob = 10. / 100.
num_neurons = 200
cell_type = sim.IZK_curr_exp

exc_params = {
    'a': 0.02,
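# A hedged sketch (assumption): the STDP parameters above wired into the old
# sPyNNaker 0.7-style API used elsewhere in this file. The population pop_exc
# is illustrative and does not appear in the fragment, and exc_params is
# truncated above, so unlisted Izhikevich parameters fall back to defaults.
timing = sim.SpikePairRule(tau_plus=tau, tau_minus=tau)
weighting = sim.AdditiveWeightDependence(w_min=min_weight, w_max=max_weight,
                                         A_plus=a_plus, A_minus=a_minus)
stdp_model = sim.STDPMechanism(timing_dependence=timing,
                               weight_dependence=weighting)
pop_exc = sim.Population(num_neurons, cell_type, exc_params)
sim.Projection(pop_exc, pop_exc,
               sim.FixedProbabilityConnector(p_connect=conn_prob,
                                             weights=weight_to_spike),
               synapse_dynamics=sim.SynapseDynamics(slow=stdp_model))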
import pyNN.spiNNaker as sim
import pyNN.utility.plotting as plot
import matplotlib.pyplot as plt
import threading
from random import uniform
from time import sleep
from pykeyboard import PyKeyboard

sim.setup(timestep=1.0)
sim.set_number_of_neurons_per_core(sim.IF_curr_exp, 100)

input1 = sim.Population(6, sim.external_devices.SpikeInjector(),
                        label="stateSpikeInjector")
pre_pop = sim.Population(6, sim.IF_curr_exp(tau_syn_E=100, tau_refrac=50),
                         label="statePopulation")
post_pop = sim.Population(1, sim.IF_curr_exp(), label="actorPopulation")

sim.external_devices.activate_live_output_for(pre_pop,
                                              database_notify_host="localhost",
                                              database_notify_port_num=19996)
sim.external_devices.activate_live_output_for(input1,
                                              database_notify_host="localhost",
                                              database_notify_port_num=19998)

timing_rule = sim.SpikePairRule(tau_plus=20.0, tau_minus=20.0,
                                A_plus=0.5, A_minus=0.5)
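# A hedged continuation (assumption): the fragment stops after the timing rule,
# so the weight rule, STDP projection and run below are only a sketch of the
# usual sPyNNaker pattern, with illustrative weight bounds.
weight_rule = sim.AdditiveWeightDependence(w_max=5.0, w_min=0.0)
stdp_model = sim.STDPMechanism(timing_dependence=timing_rule,
                               weight_dependence=weight_rule,
                               weight=0.0, delay=1.0)
stdp_projection = sim.Projection(pre_pop, post_pop, sim.AllToAllConnector(),
                                 synapse_type=stdp_model)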
#################################################################
# ******************** NESIM-RT ******************** #
# ----------------------------------------------------- #
# A Real Time Spiking Neural Network Simulator #
# ----------------------------------------------------- #
# #
# Author: Daniel Jesus Rosa Gallardo #
# email: [email protected] #
# Date: 2019 #
# Cadiz University - Spain #
# #
#################################################################

import pyNN.spiNNaker as sim
import pyNN.utility.plotting as plot
import matplotlib.pyplot as plt

sim.setup(timestep=0.25)

cell_params_lif = {
    'cm': 0.25,
    'i_offset': 0,
    'tau_m': 20,
    'tau_refrac': 2,
    'tau_syn_E': 50,
    'tau_syn_I': 5,
    'v_reset': -70,
    'v_rest': -65,
    'v_thresh': -55
}

sim.set_number_of_neurons_per_core(sim.IF_curr_exp, 100)
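# A minimal sketch (assumption, not part of the original header) showing the
# configured cell parameters in use: one Poisson source driving a small
# IF_curr_exp population, plotted with the already imported pyNN helpers.
# Weight, rate and runtime values are illustrative.
source = sim.Population(1, sim.SpikeSourcePoisson(rate=50.0), label="source")
target = sim.Population(10, sim.IF_curr_exp(**cell_params_lif), label="target")
sim.Projection(source, target, sim.AllToAllConnector(),
               synapse_type=sim.StaticSynapse(weight=5.0, delay=1.0))
target.record(["spikes"])
sim.run(1000)
spikes = target.get_data(variables=["spikes"]).segments[0].spiketrains
sim.end()
plot.Figure(plot.Panel(spikes, yticks=True, markersize=2, xlim=(0, 1000)),
            title="target spikes")
plt.show()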
:func:`NeuralModel.write_neural_c_file` and
:func:`NeuralModel.write_neural_header_file` can be used to generate C code to
be executed on SpiNNaker.

.. moduleauthor:: Francesco Galluppi, email: [email protected]
"""
import pacman
import os
import math

import pyNN.spiNNaker as p
import model_templates

DEBUG = False

dao = p.setup(db_name='/tmp/model_creator.db')

FALSE = 0
TRUE = 1
ONE_PER_POPULATION = 2


class NeuralModel():
    """
    Initialises a new neural model:

    :param model_name: A string identifying the Neural Model class in PyNN
    :type model_name: str.
    :param default_neurons_per_core: The default number of neurons of this type
        modelled by a single core
    :param image_name: (str) the name of the associated aplx image
        AbstractOutgoingEdgeSameContiguousKeysRestrictor.__init__(self)

    def get_outgoing_edge_constraints(self, partitioned_edge, graph_mapper):
        constraints = AbstractOutgoingEdgeSameContiguousKeysRestrictor\
            .get_outgoing_edge_constraints(self, partitioned_edge, graph_mapper)
        constraints.append(KeyAllocatorFixedKeyAndMaskConstraint(
            [KeyAndMask(0x42000000, 0xFFFF0000)]))
        return constraints

    def is_virtual_vertex(self):
        return True

    def model_name(self):
        return "My External Device"


import pyNN.spiNNaker as p
from pacman.model.partitionable_graph.multi_cast_partitionable_edge \
    import MultiCastPartitionableEdge

p.setup(1.0)
device = p.Population(20, MyExternalDevice, {"spinnaker_link_id": 0},
                      label="external device")
pop = p.Population(20, p.IF_curr_exp, {}, label="population")
p.Projection(device, pop, p.OneToOneConnector())
p.run(10)
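# Illustrative only (an inference from the constraint above, not original
# code): with fixed key 0x42000000 and mask 0xFFFF0000, the top 16 bits of
# every routing key from the device are pinned, and the low 16 bits are free
# to carry the neuron index.
def key_for_neuron(neuron_id, base_key=0x42000000, mask=0xFFFF0000):
    # the free bits are the complement of the mask
    assert neuron_id & mask == 0, "neuron id must fit in the unmasked bits"
    return base_key | neuron_id

print(hex(key_for_neuron(20 - 1)))  # last neuron of the 20-neuron device -> 0x42000013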
import pyNN.spiNNaker as pynn


# helper function for populations
def get_pops(n_in_pop, n_used_neuronblocks=7):
    l = []
    model_type = pynn.IF_cond_exp
    for _ in xrange(n_used_neuronblocks):
        pop = pynn.Population(n_in_pop, model_type, params)
        pop.record()
        l.append(pop)
    return l

pynn.setup(timestep=1.0)
duration = 1 * 30000  # ms
params = {
    'cm': 0.2,
    'v_reset': -70,
    'v_rest': -50,
    'v_thresh': -47,
    'e_rev_I': -60,
    'e_rev_E': -40,
}
n_in_pop = 12
def train_snn(### Settings
              data_dir = "data/X_train_zied.npy",
              cls_dir = "data/y_train_zied.npy",
              data = "load",           # pass data as an argument
              cls = "load",            # pass labels as an argument
              save = True,             # True to save all parameters of the network
              randomness = True,
              reverse_src_del = False,
              use_old_weights = False,
              rand_data = False,
              ### Parameters
              n_training = 2,          # how many times the samples will be iterated
              ts = 1.,                 # timestep of SpiNNaker (ms)
              trial_num = 10,          # how many samples (trials) from the data are used
              # Network
              n_feature = 80,          # number of features (= 4 features * 20 neurons)
              # Weights
              wei_src_enc = .2,        # from Source Array at input to Encoding Layer (exc)
              wei_enc_filt = .6,       # from Encoding Layer to Filtering Layer exc neurons (exc)
              wei_filt_inh = 0.03,     # from Filtering Layer inh neurons to exc neurons (inh)
              wei_init_stdp = .0,      # from Filtering Layer exc neurons to Output Layer (exc)
              wei_cls_exc = 0.9,       # from Output Layer exc neurons to inh neurons (exc)
              wei_cls_inh = 50,        # from Output Layer inh neurons to exc neurons (inh)
              wei_source_outp = 10.,   # from Source Array at output to Output Layer exc neurons (exc)
              wei_noise_poi = 0.02,
              # Delays
              del_init_stdp = 1.,
              del_source_outp = 1.,
              del_noise_poi = 1.,
              # Connection probabilities
              prob_filt_inh = .4,      # prob. of inhibitory connections at the Filtering Layer
              prob_stdp = 1.,          # prob. of STDP connections
              prob_output_inh = .7,    # prob. of inhibitory connections at the Output Layer
              prob_noise_poi_conn = 0.02,
              ## STDP parameters
              tau_pl = 5.,
              stdp_w_max = 0.4,        # default 0.4
              stdp_w_min = 0.0,        # default 0.0
              stdp_A_pl = 2,           # default 0.01 (below 0.01 weights don't change)
              # Data extraction
              scale_data = 2.):        # scale features into the [0, scale_data] range

    # BUG fix note left from development: n_feature somehow arrived as a tuple
    # try:
    #     trial_num = trial_num[0]
    # except Exception:
    #     pass

    ############################################################################
    ## Function Definitions
    ############################################################################
    def gaussian(x, mu, sig):
        return np.float16(np.exp(-np.power(x - mu, 2.) / (2 * np.power(sig, 2.))))

    def calc_pop_code(feature, rng1, rng2, num):
        # Population coding: a bank of `num` Gaussian tuning curves whose means
        # tile (rng1, rng2]; each feature value activates the nearby units.
        interval = np.float(rng2 - rng1) / num
        means = np.arange(rng1 + interval, rng2 + interval, interval)
        pop_code = [gaussian(feature, mu, 0.025) for mu in means]
        return pop_code

    def PoissonTimes2(t_str=0., t_end=100., rate=10., seed=1.):
        times = [t_str]
        rng = np.random.RandomState(seed=seed)
        cont = True
        while cont:
            t_next = np.floor(times[-1] + 1000. * next_spike_times(rng, rate))
            if t_next < t_end - 30:
                times.append(t_next[0])
            else:
                cont = False
        return times[1:]

    def PoissonTimes(t_str=0., t_end=100., rate=10., seed=1., max_rate=0):
        # Regularly spaced spike times approximating the requested rate
        if rate > 0:
            interval = (t_end - t_str + 0.) / rate
            times = np.arange(t_str + 30, t_end - 40, interval)
            if reverse_src_del:
                # add a delay inversely proportional to the rate
                rev_del = np.ceil(max_rate / rate)
                if rev_del != np.inf:
                    times += rev_del
            return list(times)
        else:
            return []

    def next_spike_times(rng, rate):
        return -np.log(1.0 - rng.rand(1)) / rate

    def ismember(a, b):
        b = [b]
        bind = {}
        for i, elt in enumerate(b):
            if elt not in bind:
                bind[elt] = i
        aa = [bind.get(itm, -1) for itm in a]
        return sum(np.array(aa) + 1.)
    def get_data(trial_num, test_num=10):
        # trial_num: number of training samples
        # test_num: number of test samples
        pass

    def rand_sample_of_train_set(n):
        # n: number of samples to draw
        # returns: np.arrays containing n random samples of the training set
        X = np.load(data_dir)
        y = np.load(cls_dir)
        idx = np.random.randint(len(X), size=n)
        return X[idx], y[idx]

    ############################################################################
    ## Parameters
    ############################################################################
    # Load the training data: either a random subset of trial_num samples ...
    if rand_data:
        data, cls = rand_sample_of_train_set(trial_num)
    # ... or the full training set, unless data was passed in as an argument
    elif data == "load" and cls == "load":
        data = np.load(data_dir)
        cls = np.load(cls_dir)

    # Simulation parameters
    trial_num = len(cls)                    # how many samples (trials) will be presented
    n_trials = n_training * trial_num       # total trials
    time_int_trials = 200.                  # (ms) time to present each trial
    SIM_TIME = n_trials * time_int_trials   # total simulation time (ms)

    min_del = ts
    max_del = 144 * ts
    p.setup(timestep=ts, min_delay=min_del, max_delay=max_del)

    ## Neuron numbers
    n_pop = data.shape[1]  # number of neurons in one population (X dim)
    n_cl = 2               # number of classes at the output

    ## Connection parameters
    # (weights and probabilities are taken from the arguments above)
    # Delays
    if randomness:
        # if True: draw "del_src_enc" randomly and save it
        # if False: use unit delays
        if reverse_src_del:
            # delays will be computed inversely proportional to the feature value
            del_src_enc = np.zeros(n_feature * n_pop)
        else:
            del_src_enc = [int(np.random.randint(n_pop) + 1)
                           for _ in range(n_feature * n_pop)]
        np.save("output_files/del_src_enc.npy", del_src_enc)
    else:
        #del_src_enc = np.load("output_files/del_src_enc.npy")
        del_src_enc = np.ones(n_feature * n_pop).astype(int)

    del_enc_filt = ts
    del_filt_inh = ts
    del_cls_exc = ts
    del_cls_inh = ts

    # Firing rates
    noise_poi_rate = 10.
    max_fr_input = 100.       # maximum firing rate at the input layer
    max_fr_rate_output = 20.  # maximum firing rate at the output (supervisory signal)

    ## STDP parameters (note from the original: tau_pl = 0.2 - 0.3 also works)
    tau_min = tau_pl          # default: same as tau_pl
    stdp_A_min = -stdp_A_pl   # minus in order to get a symmetric STDP curve

    ## Neuron parameters
    cell_params_lif = {'cm': 0.25,
                       'i_offset': 0.0,
                       'tau_m': 20.,
                       'tau_refrac': 2.0,
                       'tau_syn_E': 5.0,
                       'tau_syn_I': 5.0,
                       'v_reset': -70.0,
                       'v_rest': -65.0,
                       'v_thresh': -50
                       }

    ############################################################################
    ## Data Extraction
    ############################################################################
    ## Extract feature data
    r, c = np.shape(data)
    data_rates = np.reshape(data, (1, r * c))[0]

    # Thresholds (to keep spikes in range)
    thr_data_plus = 30
    thr_data_minus = -10
    #dd = [d if d < thr_data_plus else thr_data_plus for d in data_rates]
    #dd = [d if d > thr_data_minus else thr_data_minus for d in dd]

    # Shift and normalise the data into [0, 2]
    dd2 = np.array(data_rates) - min(data_rates)
    dd2 = dd2 / max(dd2) * 2
    new_data_rates = []
    for r in dd2:
        new_data_rates += calc_pop_code(r, 0., scale_data,
                                        n_feature / (n_pop + 0.0))
    data_rates = list(max_fr_input * np.array(new_data_rates))

    ## Extract class data
    cls = np.reshape(cls, (len(cls), 1))  # column vector
    r_cl, c_cl = np.shape(cls)
    cls = list(np.reshape(cls, (1, r_cl * c_cl))[0])

    ## The class and rate information to be used during the simulation
    outputs = n_training * cls[0:trial_num]  # positive ints
    poi_rate = n_training * data_rates[0:trial_num * n_feature]

    ## Save the parameters to be reused at test time
    parameter_dict = {"n_feature": n_feature, "n_pop": n_pop, "n_cl": n_cl,
                      "wei_src_enc": wei_src_enc, "wei_enc_filt": wei_enc_filt,
                      "wei_filt_inh": wei_filt_inh, "wei_cls_exc": wei_cls_exc,
                      "wei_cls_inh": wei_cls_inh, "del_enc_filt": del_enc_filt,
                      "del_init_stdp": del_init_stdp, "del_cls_exc": del_cls_exc,
                      "del_cls_inh": del_cls_inh, "trial_num": trial_num,
                      "time_int_trials": time_int_trials,
                      "scale_data": scale_data, "ts": ts,
                      "max_fr_input": max_fr_input,
                      "max_fr_rate_output": max_fr_rate_output,
                      "noise_poi_rate": noise_poi_rate,
                      "prob_filt_inh": prob_filt_inh, "prob_stdp": prob_stdp,
                      "prob_output_inh": prob_output_inh,
                      "prob_noise_poi_conn": prob_noise_poi_conn,
                      "tau_pl": tau_pl,
                      "stdp_w_max": stdp_w_max, "stdp_w_min": stdp_w_min,
                      "stdp_A_pl": stdp_A_pl,
                      "wei_noise_poi": wei_noise_poi,
                      "del_noise_poi": del_noise_poi,
                      "thr_data_plus": thr_data_plus,
                      "thr_data_minus": thr_data_minus
                      }
    if save:
        np.save("output_files/parameters1", parameter_dict)
        np.save("output_files/parameters2", del_src_enc)

    ############################################################################
    ## Create populations for the different layers
    ############################################################################
    poi_layer = []
    enc_layer = []
    filt_layer_exc = []
    out_layer_exc = []
    out_layer_inh = []
    out_spike_source = []

    # Calculate spike times at the input from the rate information of the features
    spike_times = [[] for i in range(n_feature)]
    for i in range(n_trials):
        t_st = i * time_int_trials
        t_end = t_st + time_int_trials
        ind = i * n_feature
        for j in range(n_feature):
            times = PoissonTimes(t_st, t_end, poi_rate[ind + j],
                                 np.random.randint(100),
                                 max_rate=max(poi_rate))
            for t in times:
                spike_times[j].append(t)

    if randomness:
        # if True: save the freshly generated "spike_times"
        # if False: load previously saved "spike_times"
        np.save('output_files/spike_times_train.npy', spike_times)
    else:
        spike_times = np.load('output_files/spike_times_train.npy')

    # Calculate spike times at the output (the supervisory signal)
    out_spike_times = [[] for i in range(n_cl)]
    for i in range(n_trials):
        t_st = i * time_int_trials
        t_end = t_st + time_int_trials
        ind = outputs[i]
        times = PoissonTimes(t_st, t_end, max_fr_rate_output,
                             np.random.randint(100))
        for t in times:
            out_spike_times[int(ind)].append(t)

    if randomness:
        # if True: save the freshly generated "out_spike_times"
        # if False: load previously saved "out_spike_times"
        np.save('output_files/out_spike_times.npy', out_spike_times)
    else:
        out_spike_times = np.load('output_files/out_spike_times.npy')

    # Spike source of the input layer
    spike_source = p.Population(n_feature, p.SpikeSourceArray,
                                {'spike_times': spike_times},
                                label='spike_source')

    # Spike source of the output layer (supervisory signal)
    for i in range(n_cl):
        out_spike_source.append(p.Population(1, p.SpikeSourceArray,
                                             {'spike_times': [out_spike_times[i]]},
                                             label='out_spike_source'))

    # Encoding layer and filtering layer definitions
    enc_layer = p.Population(n_feature * n_pop, p.IF_curr_exp, cell_params_lif,
                             label='enc_layer')
    filt_layer = p.Population(n_feature * n_pop, p.IF_curr_exp, cell_params_lif,
                              label='filt_layer')

    # Excitatory and inhibitory populations at the output
    for i in range(n_cl):
        out_layer_exc.append(p.Population(n_pop, p.IF_curr_exp, cell_params_lif,
                                          label='out_layer_exc{}'.format(i)))
        out_layer_inh.append(p.Population(n_pop, p.IF_curr_exp, cell_params_lif,
                                          label='out_layer_inh{}'.format(i)))
        out_layer_exc[i].record()

    # Noisy Poisson population at the input
    poisson_input = p.Population(n_pop * 2, p.SpikeSourcePoisson,
                                 {"rate": noise_poi_rate})

    # Record spikes
    enc_layer.record()
    filt_layer.record()
    #enc_layer.initialize('v', p.RandomDistribution('uniform', [-51., -69.]))
    #filt_layer.initialize('v', p.RandomDistribution('uniform', [-51., -69.]))

    ############################################################################
    ## Projections
    ############################################################################
    ## Connection list from the Spike Source Array to the Encoding Layer
    conn_inp_enc = []
    for i in range(n_feature):
        ind = i * n_pop
        for j in range(n_pop):
            conn_inp_enc.append([i, ind + j, wei_src_enc, del_src_enc[ind + j]])
    if save:
        np.save("output_files/conn_inp_enc", conn_inp_enc)

    ## Connection list for the Filtering Layer inhibition
    if randomness:
        # if True: draw conn_filt_inh randomly and save it
        # if False: load the previously saved conn_filt_inh
        conn_filt_inh = []
        for i in range(n_feature):
            rng1 = i * n_pop
            rng2 = rng1 + n_pop
            inp = range(rng1, rng2)
            outp = range(0, rng1) + range(rng2, n_feature * n_pop)
            for ii in inp:
                for jj in outp:
                    if prob_filt_inh > np.random.rand():
                        conn_filt_inh.append([ii, jj, wei_filt_inh, del_filt_inh])
        if save:
            np.save('output_files/conn_filt_inh.npy', conn_filt_inh)
    else:
        conn_filt_inh = np.load('output_files/conn_filt_inh.npy')

    ## STDP connection list
    if randomness:
        # if True: draw conn_stdp_list randomly and save it
        # if False: load the previously saved conn_stdp_list
        conn_stdp_list = [[] for i in range(n_cl)]
        for i in range(n_cl):  # for each population at the output layer
            if use_old_weights:
                cl_weights = np.load("output_files/stdp_weights{}.npy".format(i))
                w = 0
            for ii in range(n_pop * n_feature):  # for each neuron in the filtering layer
                for jj in range(n_pop):  # for each neuron in this output population
                    if prob_stdp > np.random.rand():  # connection probability satisfied:
                        # make the connection
                        if use_old_weights:
                            conn_stdp_list[i].append(
                                [ii, jj, cl_weights[w], del_init_stdp])
                            w += 1
                        else:
                            conn_stdp_list[i].append(
                                [ii, jj, wei_init_stdp, del_init_stdp])
        if use_old_weights == False or save == True:
            np.save('output_files/conn_stdp_list.npy', conn_stdp_list)
    else:
        conn_stdp_list = np.load('output_files/conn_stdp_list.npy')

    ## Output layer inhibitory connection list
    if randomness:
        # if True: draw conn_output_inh randomly and save it
        # if False: load the previously saved conn_output_inh
        conn_output_inh = [[] for i in range(n_cl) for j in range(n_cl) if i != j]
        c = 0
        for i in range(n_cl):
            for j in range(n_cl):
                if i != j:
                    for ii in range(n_pop):
                        for jj in range(n_pop):
                            if prob_output_inh > np.random.rand():
                                conn_output_inh[c].append(
                                    [ii, jj, wei_cls_inh, del_cls_inh])
                    c += 1
        if save:
            np.save("output_files/conn_output_inh.npy", conn_output_inh)
    else:
        conn_output_inh = np.load("output_files/conn_output_inh.npy")

    ## Spike source to encoding layer
    p.Projection(spike_source, enc_layer, p.FromListConnector(conn_inp_enc))

    ## Encoding layer to filtering layer
    p.Projection(enc_layer, filt_layer,
                 p.OneToOneConnector(weights=wei_enc_filt, delays=del_enc_filt))

    ## Filtering layer inhibition
    p.Projection(filt_layer, filt_layer, p.FromListConnector(conn_filt_inh),
                 target="inhibitory")

    ## STDP connection between the filtering layer and the output layer
    timing_rule = p.SpikePairRule(tau_plus=tau_pl, tau_minus=tau_min)
    weight_rule = p.AdditiveWeightDependence(w_max=stdp_w_max, w_min=stdp_w_min,
                                             A_plus=stdp_A_pl, A_minus=stdp_A_min)
    stdp_model = p.STDPMechanism(timing_dependence=timing_rule,
                                 weight_dependence=weight_rule)
    stdp_proj = []
    for j in range(n_cl):
        stdp_proj.append(
            p.Projection(filt_layer, out_layer_exc[j],
                         p.FromListConnector(conn_stdp_list[j]),
                         synapse_dynamics=p.SynapseDynamics(slow=stdp_model)))

    ## Connections between the output layer populations
    c = 0
    for i in range(n_cl):
        p.Projection(out_layer_exc[i], out_layer_inh[i],
                     p.OneToOneConnector(weights=wei_cls_exc, delays=del_cls_exc))
        iter_array = [j for j in range(n_cl) if j != i]
        for j in iter_array:
            p.Projection(out_layer_exc[i], out_layer_exc[j],
                         p.FromListConnector(conn_output_inh[c]),
                         target="inhibitory")
            c += 1

    ## Spike source array to the output layer
    for i in range(n_cl):
        p.Projection(out_spike_source[i], out_layer_exc[i],
                     p.AllToAllConnector(weights=wei_source_outp,
                                         delays=del_source_outp))
        iter_array = [j for j in range(n_cl) if j != i]
        for j in iter_array:
            p.Projection(out_spike_source[i], out_layer_exc[j],
                         p.AllToAllConnector(weights=wei_source_outp,
                                             delays=del_source_outp),
                         target="inhibitory")

    ## Noisy Poisson connection to the encoding layer
    if randomness:  # if True: connect noise to the network; if False: no noise
        p.Projection(poisson_input, enc_layer,
                     p.FixedProbabilityConnector(p_connect=prob_noise_poi_conn,
                                                 weights=wei_noise_poi,
                                                 delays=del_noise_poi))
    ############################################################################
    ## Simulation
    ############################################################################
    p.run(SIM_TIME)

    Enc_Spikes = enc_layer.getSpikes()
    Filt_Exc_Spikes = filt_layer.getSpikes()

    Out_Spikes = [[] for i in range(n_cl)]
    for i in range(n_cl):
        Out_Spikes[i] = out_layer_exc[i].getSpikes()

    wei = []
    for i in range(n_cl):
        ww = stdp_proj[i].getWeights()
        if save:
            np.save("output_files/stdp_weights{}".format(i), ww)
        wei.append(ww)

    p.end()

    ############################################################################
    ## Plot
    ############################################################################
    ## Plot 1: encoding layer raster plot
    if 0:
        pylab.figure()
        pylab.xlabel('Time (ms)')
        pylab.ylabel('Neuron ID')
        pylab.title('Encoding Layer Raster Plot')
        pylab.hold(True)
        pylab.plot([i[1] for i in Enc_Spikes], [i[0] for i in Enc_Spikes], ".b")
        pylab.hold(False)
        pylab.show()

    ## Plot 2-1: filtering layer raster plot
    if 0:
        pylab.figure()
        pylab.xlabel('Time (ms)')
        pylab.ylabel('Neuron ID')
        pylab.title('Filtering Layer Raster Plot')
        pylab.plot([i[1] for i in Filt_Exc_Spikes],
                   [i[0] for i in Filt_Exc_Spikes], ".b")
        pylab.show()

    ## Plot 2-2: filtering layer raster plot with trial boundaries
    if 0:
        pylab.figure()
        pylab.xlabel('Time (ms)')
        pylab.ylabel('Neuron ID')
        pylab.title('Filtering Layer Raster Plot')
        pylab.hold(True)
        pylab.plot([i[1] for i in Filt_Exc_Spikes],
                   [i[0] for i in Filt_Exc_Spikes], ".b")
        time_ind = [i * time_int_trials for i in range(len(outputs))]
        for i in range(len(time_ind)):
            pylab.plot([time_ind[i], time_ind[i]], [0, 2000], "r")
        pylab.hold(False)
        pylab.show()

    ## Plot 3-1: output layer raster plot
    if 0:
        pylab.figure()
        pylab.xlabel('Time (ms)')
        pylab.ylabel('Neuron')
        pylab.title('Output Layer Raster Plot')
        pylab.hold(True)
        c = 0
        for array in Out_Spikes:
            pylab.plot([i[1] for i in array], [i[0] + c for i in array], ".b")
            c += 0.2
        pylab.hold(False)
        pylab.axis([-10, SIM_TIME + 100, -1, n_pop + 3])
        pylab.show()

    ## Plot 4: STDP weights at the end of training
    if 1:
        pylab.figure()
        pylab.xlabel('Weight ID')
        pylab.ylabel('Weight Value')
        pylab.title('STDP weights at the end')
        pylab.hold(True)
        for i in range(n_cl):
            pylab.plot(wei[i])
        pylab.hold(False)
        pylab.axis([-10, n_pop * n_feature * n_pop * 0.5 + 10,
                    -stdp_w_max, 2 * stdp_w_max])
        str_legend = ["To Cl {}".format(i + 1) for i in range(n_cl)]
        pylab.legend(str_legend)

        # Save to the next free plots/weights_<n>.png filename
        fname = 'plots/weights_1.png'
        while True:
            if os.path.isfile(fname):  # file already exists: bump the number
                new_num = int(fname.split('.')[0].split('_')[1]) + 1
                fname = fname.split('_')[0] + '_' + str(new_num) + '.png'
            else:
                pylab.savefig(fname)
                break

    ## Plot 5: spike source spiking times
    if 0:
        pylab.figure()
        pylab.hold(True)
        pylab.plot(out_spike_times[0],
                   [1 for i in range(len(out_spike_times[0]))], "x")
        pylab.plot(out_spike_times[1],
                   [1.05 for i in range(len(out_spike_times[1]))], "x")
        pylab.hold(False)
        pylab.title("Spike Source Spiking Times")
        pylab.axis([-100, SIM_TIME + 100, -2, 3])
        pylab.show()

    ## Calculate the spiking activity of each neuron for each class of input
    sum_filt = np.array([[0 for i in range(n_feature * n_pop)]
                         for j in range(n_cl)])
    for i in range(n_trials):
        t_st = i * time_int_trials
        t_end = t_st + time_int_trials
        cl = outputs[i]
        for n, t in Filt_Exc_Spikes:
            if t >= t_st and t < t_end:
                sum_filt[int(cl), int(n)] += 1

    a4 = sum_filt[0]
    b4 = sum_filt[1]
    thr = 20
    diff_vec = np.abs(a4 - b4)
    diff_thr = [i if i > thr else 0. for i in diff_vec]
    diff_ind = [i for i in range(len(diff_thr)) if diff_thr[i] != 0]
    if save:
        np.save("output_files/diff_ind_filt", diff_ind)

    diff2 = a4 - b4
    diff_thr2 = [i if i > thr or i < -thr else 0. for i in diff2]
    diff_ind2 = [i for i in range(len(diff_thr2)) if diff_thr2[i] != 0]
    if save:
        np.save("output_files/diff_ind_filt2", diff_ind2)
        np.save("output_files/diff_thr2", diff_thr2)

    ## Plot 6: total spiking activity of the decomposition-layer neurons per class
    if 0:
        pylab.figure()
        pylab.hold(True)
        pylab.plot(a4, "b")
        pylab.plot(b4, "r")
        pylab.xlabel('Neuron ID')
        pylab.ylabel('Total Firing Rates Through Trials')
        pylab.title("Total Spiking Activity of Neurons at Decomposition Layer "
                    "for Each Class")
        pylab.hold(False)
        pylab.legend(["Activity to AN1", "Activity to AN2"])
        pylab.show()
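# A hedged usage sketch (not in the original fragment). The function body
# assumes these module-level imports; the call itself relies only on the
# function's own default data files plus existing output_files/ and plots/
# directories. The argument values are illustrative.
import numpy as np
import pyNN.spiNNaker as p
import pylab
import os

if __name__ == "__main__":
    train_snn(n_training=1, trial_num=4, save=False)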
def run_test(w_list, cell_para, spike_source_data):
    pop_list = []
    p.setup(timestep=1.0, min_delay=1.0, max_delay=3.0)

    # Input spike-source layer
    input_size = w_list[0].shape[0]
    spike_list = []  # renamed from `list`, which shadowed the builtin
    for j in range(input_size):
        spike_list.append(spike_source_data[j])
    pop_in = p.Population(input_size, p.SpikeSourceArray,
                          {'spike_times': spike_list})
    pop_list.append(pop_in)

    # One layer per weight matrix: split each matrix into excitatory and
    # inhibitory connection lists and chain the layers together
    for w in w_list:
        input_size = w.shape[0]
        output_size = w.shape[1]
        conn_list_exci = []
        conn_list_inhi = []
        for x_ind in range(input_size):
            for y_ind in range(output_size):
                weights = w[x_ind][y_ind]
                if weights > 0:
                    conn_list_exci.append((x_ind, y_ind, weights, 1.))
                elif weights < 0:
                    conn_list_inhi.append((x_ind, y_ind, weights, 1.))
        pop_out = p.Population(output_size, p.IF_curr_exp, cell_para)
        if len(conn_list_exci) > 0:
            p.Projection(pop_in, pop_out, p.FromListConnector(conn_list_exci),
                         target='excitatory')
        if len(conn_list_inhi) > 0:
            p.Projection(pop_in, pop_out, p.FromListConnector(conn_list_inhi),
                         target='inhibitory')
        pop_list.append(pop_out)
        pop_in = pop_out

    pop_out.record()
    run_time = np.ceil(np.max(spike_source_data)[0] / 1000.) * 1000
    p.run(run_time)
    spikes = pop_out.getSpikes(compatible_output=True)
    return spikes
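# A hedged usage sketch (assumption): two random weight matrices forming a
# 1024-100-10 feed-forward net. spike_source_data is a ragged list of
# per-neuron spike-time lists, matching how run_test takes its maximum.
# All values are illustrative.
import numpy as np
import pyNN.spiNNaker as p

cell_para = {'cm': 0.25, 'i_offset': 0.0, 'tau_m': 20.0, 'tau_refrac': 2.0,
             'tau_syn_E': 5.0, 'tau_syn_I': 5.0,
             'v_reset': -70.0, 'v_rest': -65.0, 'v_thresh': -50.0}
w_list = [np.random.uniform(-1, 1, (1024, 100)),
          np.random.uniform(-1, 1, (100, 10))]
spike_source_data = [[float(5 * (i % 10) + 5)] * (1 + i % 2)
                     for i in range(1024)]
spikes = run_test(w_list, cell_para, spike_source_data)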
import pyNN.spiNNaker as p
import memory_for_parser as mem
import parse as parser
import get_text as sp

NUMBER_PEOPLE = 10
NUMBER_LOCS = 10
NUMBER_OBJS = 10

max_delay = 100.0
p.setup(timestep=1.0, min_delay=1.0, max_delay=max_delay)

#parser.parse_no_run('spiNNaker', "daniel went to the bathroom. john went to the hallway. where is john?")
sent1 = sp.getSentence(parser.words, "Say your first assertion now")
sent2 = sp.getSentence(parser.words, "Say your second assertion now")
sent3 = sp.getSentence(parser.words, "Ask your question now")
parser.parse_no_run('spiNNaker', sent1 + sent2 + sent3)

[input_populations, state_populations, output_populations, projections] = \
    mem.memory(NUMBER_PEOPLE, NUMBER_LOCS, NUMBER_OBJS)

cell_params_lif = {'cm': 0.25,  # nF
                   'i_offset': 0.0,
                   'tau_m': 20.0,
                   'tau_refrac': 2.0,
                   'tau_syn_E': 5.0,
                   'tau_syn_I': 5.0,
                   'v_reset': -70.0,
def estimate_kb(cell_params_lif):
    cell_para = copy.deepcopy(cell_params_lif)
    random.seed(0)
    p.setup(timestep=1.0, min_delay=1.0, max_delay=16.0)

    run_s = 10.
    runtime = 1000. * run_s
    max_rate = 1000.
    ee_connector = p.OneToOneConnector(weights=1.0, delays=2.0)

    pop_list = []
    pop_output = []
    pop_source = []
    x = np.arange(0., 1.01, 0.1)
    count = 0
    trail = 10  # trials for averaging

    # One Poisson-driven neuron per (rate, trial) pair
    for i in x:
        for j in range(trail):
            pop_output.append(p.Population(1, p.IF_curr_exp, cell_para))
            poisson_spikes = mu.poisson_generator(i * max_rate, 0, runtime)
            pop_source.append(p.Population(1, p.SpikeSourceArray,
                                           {'spike_times': poisson_spikes}))
            p.Projection(pop_source[count], pop_output[count], ee_connector,
                         target='excitatory')
            pop_output[count].record()
            count += 1

    # One current-driven neuron per offset value
    count = 0
    for i in x:
        cell_para['i_offset'] = i
        pop_list.append(p.Population(1, p.IF_curr_exp, cell_para))
        pop_list[count].record()
        count += 1
    pop_list[count - 1].record_v()

    p.run(runtime)

    rate_I = np.zeros(count)
    rate_P = np.zeros(count)
    rate_P_max = np.zeros(count)
    rate_P_min = np.ones(count) * 1000.
    for i in range(count):
        spikes = pop_list[i].getSpikes(compatible_output=True)
        rate_I[i] = len(spikes) / run_s
        for j in range(trail):
            spikes = pop_output[i * trail + j].getSpikes(compatible_output=True)
            spike_num = len(spikes) / run_s
            rate_P[i] += spike_num
            if spike_num > rate_P_max[i]:
                rate_P_max[i] = spike_num
            if spike_num < rate_P_min[i]:
                rate_P_min[i] = spike_num
        rate_P[i] /= trail

    '''
    #plot_spikes(spikes, 'Current = 10. mA')
    plt.plot(x, rate_I, label='current')
    plt.plot(x, rate_P, label='Poisson input')
    plt.fill_between(x, rate_P_min, rate_P_max, facecolor='green', alpha=0.3)
    '''

    # Fit a line to the rising part of the Poisson-driven response
    x0 = np.where(rate_P > 1.)[0][0]
    x1 = 4
    k = (rate_P[x1] - rate_P[x0]) / (x[x1] - x[x0])

    '''
    plt.plot(x, k * (x - x[x0]) + rate_P[x0], label='linear')
    plt.legend(loc='upper left', shadow=True)
    plt.grid('on')
    plt.show()
    '''

    p.end()
    return k, x[x0], rate_P[x0]
(http://neuralensemble.org/trac/NeuroTools)

Authors: Catherine Wacongne <*****@*****.**>
         Xavier Lagorce <*****@*****.**>

April 2013
"""
try:
    import pyNN.spiNNaker as sim
except Exception:
    import spynnaker8 as sim
from pyNN.utility.plotting import Figure, Panel
import matplotlib.pyplot as plt

# SpiNNaker setup
sim.setup(timestep=1.0, min_delay=1.0, max_delay=10.0)

# +-------------------------------------------------------------------+
# |                        General Parameters                         |
# +-------------------------------------------------------------------+

# Population parameters
model = sim.IF_curr_exp
cell_params = {
    'cm': 0.25,
    'i_offset': 0.0,
    'tau_m': 20.0,
    'tau_refrac': 2.0,
    'tau_syn_E': 5.0,
    'tau_syn_I': 5.0,
#!/usr/bin/env python
import IPython
import pyNN.spiNNaker as p
#import pyNN.neuron as p
from pylab import *
from pyNN.utility import init_logging
from pyNN.utility import get_script_args
from pyNN.errors import RecordingError

spin_control = p.setup(timestep=0.1, min_delay=0.1, max_delay=4.0)
#init_logging("logfile", debug=True)

ifcell = p.Population(10, p.IF_cond_exp,
                      {'i_offset': 0.1,
                       'tau_refrac': 3.0,
                       'v_thresh': -51.0,
                       'tau_syn_E': 2.0,
                       'tau_syn_I': 5.0,
                       'v_reset': -70.0,
                       'e_rev_E': 0.,
                       'e_rev_I': -80.},
                      label="myFinalCell_PLOT")

spike_sourceE = p.Population(10, p.SpikeSourceArray,
                             {'spike_times': [float(i) for i in range(0, 9000, 100)]},
                             label="mySourceE_PLOT")
spike_sourceI = p.Population(10, p.SpikeSourceArray,
                             {'spike_times': [float(i) for i in range(1000, 5000, 50)]},
                             label="mySourceI_PLOT")

connE = p.Projection(spike_sourceE, ifcell,
                     p.AllToAllConnector(weights=1.0, delays=0.2),
                     target='excitatory')
connI = p.Projection(spike_sourceI, ifcell,
                     p.AllToAllConnector(weights=3.0, delays=0.2),
                     target='inhibitory')

#p1 = Population(100, IF_curr_alpha, structure=space.Grid2D())
#prepop = p.Population(100, p.SpikeSourceArray,
#                      {'spike_times': [[i for i in arange(10, duration, 100)],
#                                       [i for i in arange(50, duration, 100)]] * (nn/2)})
#prj2_1 = Projection(p2, p1, method=AllToAllConnector(), target='excitatory')
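# A hedged continuation (assumption): the fragment never records or runs, so
# this sketch finishes it in the old PyNN 0.7-style API used above. The run
# duration matches the last excitatory input spike.
ifcell.record()    # spikes
ifcell.record_v()  # membrane potential
p.run(9000)
spikes = ifcell.getSpikes(compatible_output=True)
v = ifcell.get_v(compatible_output=True)
p.end()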