def run(TSTOP=250, group_size=100, g_time=150, neuron_n=1000, linear=True, ext_w=0.2, inh_w=0.2):
    """Run a simulation and return the resulting spiketrain"""
    # Basic equation of the model
    eq = """dv/dt = -gamma*v + I0 : volt
            Ie : volt
            Ii : volt
         """
    thetaU = 16 * mV
    tauM = 8 * ms
    gamma = 1 / tauM
    I0 = 17.6 * mV / tauM

    # Build the group of neurons to use
    G = br2.NeuronGroup(neuron_n, threshold="v>thetaU", reset="v=0*mV", method='euler', model=eq)

    # Record the spikes from this group
    spikes = br2.SpikeMonitor(G)

    # Build stimulation
    stim = br2.SpikeGeneratorGroup(1, [0], [g_time]*ms - br2.defaultclock.dt)
    stim_syn = br2.Synapses(stim, G, on_pre="v += 2*thetaU")
    stim_syn.connect(i=0, j=np.arange(group_size))

    br2.magic_network.schedule = ['start', 'groups', 'synapses', 'thresholds', 'resets', 'end']

    # Random connectivity: 30% connection probability, half excitatory, half inhibitory
    connections = np.random.rand(neuron_n, neuron_n) < 0.3
    exc_or_inh = np.random.rand(neuron_n, neuron_n) < 0.5
    exc_i, exc_j = (connections & exc_or_inh).nonzero()
    inh_i, inh_j = (connections & ~exc_or_inh).nonzero()

    if linear:
        G.run_regularly('''
                        v += Ie + Ii
                        Ie = 0*mV
                        Ii = 0*mV
                        ''', when='after_synapses')
    else:
        G.run_regularly('''
                        v += clip(Ie, 0*mV, 2*mV) + clip(2*(Ie-2*mV), 0*mV, 4*mV) + Ii
                        Ie = 0*mV
                        Ii = 0*mV
                        ''', when='after_synapses')

    dt = br2.defaultclock.dt
    exc_syn = br2.Synapses(G, G, on_pre='Ie += %s*mV' % (ext_w), delay=5*ms-dt)
    inh_syn = br2.Synapses(G, G, on_pre='Ii -= %s*mV' % (inh_w), delay=5*ms-dt)
    exc_syn.connect(i=exc_i, j=exc_j)
    inh_syn.connect(i=inh_i, j=inh_j)

    # Set random initial conditions
    G.v = np.random.rand(neuron_n) * 16 * mV

    br2.run(TSTOP * ms)
    return spikes
def make_classification_network(self, number_of_stimuli, network_name):
    if network_name not in self.networks:
        network_size = number_of_stimuli * self.number_of_neurons
        count_mat = np.zeros((int(self.stimulus_duration / ms * 10), network_size), int)
        target = b2.NeuronGroup(N=number_of_stimuli, model=self.eqs, threshold='v>threshold', reset='v=0',
                                namespace={'tau': self.tau, 'threshold': self.threshold})
        driving = b2.SpikeGeneratorGroup(N=network_size, indices=[0], times=[0 * ms])
        # counts = b2.TimedArray(values=_count_mat, dt=b2.defaultclock.dt)
        synapses = b2.Synapses(source=driving, target=target, model='w: 1', on_pre='v+=w*counts(t, i)')
        i = np.arange(network_size)
        j = np.repeat(range(number_of_stimuli), self.number_of_neurons)
        synapses.connect(j=j, i=i)
        synapses.w = np.tile(self.weights, reps=number_of_stimuli)
        spikes = b2.SpikeMonitor(target, record=True)
        voltage = b2.StateMonitor(target, 'v', record=True)
        net = b2.Network([target, driving, synapses, spikes, voltage])
        net.store()
        self.networks[network_name] = dict(net=net,
                                           count_mat=count_mat,
                                           synapses=synapses,
                                           v_mon=voltage,
                                           spike_mon=spikes,
                                           number_of_stimuli=number_of_stimuli,
                                           driving=driving)
    else:
        self.networks[network_name]['synapses'].w = np.tile(self.weights, reps=number_of_stimuli)
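# The on_pre rule above ('v += w*counts(t, i)') refers to a 2D TimedArray named `counts`
# that this method never builds (only a commented-out hint remains). A minimal sketch of
# the missing piece, assuming `count_mat` holds the per-time-bin, per-synapse input counts
# and that the returned object is made visible under the name `counts` where the Synapses
# equations are resolved; this is an assumption, not the author's code:
import brian2 as b2

def make_counts(count_mat):
    # rows of count_mat index time bins of width defaultclock.dt, columns index synapses,
    # so counts(t, i) returns the count for time t and synapse index i
    return b2.TimedArray(values=count_mat, dt=b2.defaultclock.dt)

# assumed usage, before running the stored network:
# counts = make_counts(count_mat)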
def set_in_poisson(self, target):
    """ Set Poissonian input to a group of neurons

    target: list of target groups
    N_p: # of Poisson inputs
    f_p: frequency of P
    sp: sparseness of connections
    coef_ep: factor of ep conductance to g_exc
    """
    ## somehow PoissonInput is way slower! also leads to diff behaviour
    #for gr in target:
        #inp_poisson = bb.PoissonInput(gr,N=100,rate=f_p,
                        #weight=2.*self.g_ee,state='ge')
        #self.network.add(inp_poisson)
    N_p = self.ext_input['N_p']
    f_p = self.ext_input['f_p']
    sp = self.ext_input['sp']
    coef_ep = self.ext_input['coef_ep']

    self.P_poisson = bb.PoissonGroup(N_p, f_p, clock=self.network.clock)
    self.network.add(self.P_poisson)
    for gr in target:
        #Cep = bb.Connection(self.P_poisson, gr, 'ge',
        #                    weight=coef_ep*self.g_ee, sparseness=sp)
        Cep = bb.Synapses(self.P_poisson, gr, model='w:siemens', pre='ge+=w')
        Cep.connect(p=sp)
        Cep.w = coef_ep * self.g_ee
        self.network.add(Cep)
def simulate(tau):
    b2.start_scope()
    if standalone_mode:
        b2.get_device().reinit()
        b2.get_device().activate(build_on_run=False, directory=directory_name)
    net = b2.Network()
    P = b2.PoissonGroup(num_inputs, rates=input_rate)
    G = b2.NeuronGroup(1, eqs, threshold='v>1', reset='v=0', method='euler')
    S = b2.Synapses(P, G, on_pre='v += weight')
    S.connect()
    M = b2.SpikeMonitor(G)
    net.add(P)
    net.add(G)
    net.add(S)
    net.add(M)
    net.run(1000 * b2.ms)
    if standalone_mode:
        b2.get_device().build(directory=directory_name, compile=True, run=True, debug=False)
    return M
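# The snippet above relies on module-level names (standalone_mode, directory_name,
# num_inputs, input_rate, weight, eqs) that are not shown here. A minimal, assumed
# setup that would make it runnable in C++ standalone mode could look like this;
# the concrete values are placeholders, not the author's:
import brian2 as b2

standalone_mode = True
directory_name = 'standalone_sim'   # assumed build directory name
if standalone_mode:
    # build_on_run=False so the device is built explicitly after net.run()
    b2.set_device('cpp_standalone', build_on_run=False)

num_inputs = 100
input_rate = 10 * b2.Hz
weight = 0.1
eqs = '''
dv/dt = -v/tau : 1
'''
# `tau` is the argument of simulate(); Brian2 picks it up from the local namespace
# when the NeuronGroup is created, e.g. M = simulate(10 * b2.ms)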
def random_connections(NTWK):
    CONN = np.empty((len(NTWK['POPS']), len(NTWK['POPS'])), dtype=object)
    CONN2 = []
    for ii, jj in itertools.product(range(len(NTWK['POPS'])), range(len(NTWK['POPS']))):
        if (NTWK['M'][ii, jj]['pconn'] > 0) and (NTWK['M'][ii, jj]['Q'] != 0):
            CONN[ii, jj] = brian2.Synapses(NTWK['POPS'][ii], NTWK['POPS'][jj], model='w:siemens',
                                           on_pre='G' + NTWK['M'][ii, jj]['name'] + '_post+=w')
            # CONN[ii,jj].connect(p=NTWK['M'][ii,jj]['pconn'], condition='i!=j')
            # N.B. the brian2 call above does weird things (e.g. it creates synchrony),
            # so we draw the connections manually to fix the number of synapses per cell
            N_per_cell = int(NTWK['M'][ii, jj]['pconn'] * NTWK['POPS'][ii].N)
            if ii == jj:
                # need to take care of no autapse
                i_rdms = np.concatenate([
                    np.random.choice(np.delete(np.arange(NTWK['POPS'][ii].N), [iii]), N_per_cell)
                    for iii in range(NTWK['POPS'][jj].N)])
            else:
                i_rdms = np.concatenate([
                    np.random.choice(np.arange(NTWK['POPS'][ii].N), N_per_cell)
                    for jjj in range(NTWK['POPS'][jj].N)])
            j_fixed = np.concatenate([
                np.ones(N_per_cell, dtype=int) * jjj
                for jjj in range(NTWK['POPS'][jj].N)])
            CONN[ii, jj].connect(i=i_rdms, j=j_fixed)
            CONN[ii, jj].w = NTWK['M'][ii, jj]['Q'] * brian2.nS
            CONN2.append(CONN[ii, jj])
    return CONN2
def set_syn_input(self, target, time):
    '''adding sync inputs at some time points'''
    ext_in = bb.SpikeGeneratorGroup(1, [(0, time)], self.network.clock)
    C_syne = bb.Synapses(ext_in, target, model='w:siemens', pre='ge+=w')
    C_syne.connect_random(ext_in, target, sparseness=1.)
    C_syne.w = 30. * self.g_ee
    self.network.add(ext_in, C_syne)
def inhibition(snn):
    xmax = int(numpy.max(snn.x) + 1)
    ymax = int(numpy.max(snn.y) + 1)
    S = brian2.Synapses(snn, snn, on_pre='v *= 0')
    #S.connect(condition='if (j%64)!=0:i=j-65 or if (j%4096)-64>0:i=j-64 or if (j%64)!=63:i=j-63 \
    #           or if (j%64)!=0:i=j-1 or if (j%64)!=63:i=j+1 or if (j%64)!=0: i=j+63 or if (j%4096)+64<4096:i=j+64 or \
    #           if (j%64)!=63: i=j+65')
    S.connect(condition='i==j-ymax-1 and j%ymax!=0 and floor(j/ymax)%xmax!=0')           # neuron (x-1, y-1)
    S.connect(condition='i==j-ymax and floor(j/ymax)%xmax!=0')                           # neuron (x-1, y)
    S.connect(condition='i==j-ymax+1 and j%ymax!=ymax-1 and floor(j/ymax)%xmax!=0')      # neuron (x-1, y+1)
    S.connect(condition='i==j-1 and j%ymax!=0')                                          # neuron (x, y-1)
    S.connect(condition='i==j+1 and j%ymax!=ymax-1')                                     # neuron (x, y+1)
    S.connect(condition='i==j+ymax-1 and j%ymax!=0 and floor(j/ymax)%xmax!=xmax-1')      # neuron (x+1, y-1)
    S.connect(condition='i==j+ymax and floor(j/ymax)%xmax!=xmax-1')                      # neuron (x+1, y)
    S.connect(condition='i==j+ymax+1 and j%ymax!=ymax-1 and floor(j/ymax)%xmax!=xmax-1') # neuron (x+1, y+1)
    return S
def main8():
    # using generator syntax to create connections
    bs.start_scope()
    n_neurons = 10
    G = bs.NeuronGroup(n_neurons, 'v:1')
    S = bs.Synapses(G, G)
    """
    i : int, ndarray of int, optional
        The presynaptic neuron indices (in the form of an index or an array of indices).
        Must be combined with the ``j`` argument.
    j : int, ndarray of int, str, optional
        The postsynaptic neuron indices. It can be an index or array of indices
        if combined with the ``i`` argument, or it can be a string generator expression.
    """
    # the above is the reason why j="i" works but not i="j":
    # only j can take a string; i has to be an int or an ndarray of ints
    S.connect(j='i', skip_if_invalid=True)
    # You can also do it the following way:
    # S.connect(condition='i==j', skip_if_invalid=True)
    visualise_connectivity(S)
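# Several of these examples call visualise_connectivity(S) without defining it. It is
# presumably the helper from the Brian2 tutorials; the sketch below follows that pattern
# and is an assumption, not code taken from this collection:
import numpy as np
import matplotlib.pyplot as plt

def visualise_connectivity(S):
    """Plot the connectivity of an existing Synapses object S."""
    Ns = len(S.source)
    Nt = len(S.target)
    plt.figure(figsize=(10, 4))
    plt.subplot(121)
    # left panel: one line per connected (source, target) pair
    plt.plot(np.zeros(Ns), np.arange(Ns), 'ok', ms=10)
    plt.plot(np.ones(Nt), np.arange(Nt), 'ok', ms=10)
    for i, j in zip(S.i, S.j):
        plt.plot([0, 1], [i, j], '-k')
    plt.xticks([0, 1], ['Source', 'Target'])
    plt.ylabel('Neuron index')
    plt.subplot(122)
    # right panel: connectivity as a scatter of (source, target) index pairs
    plt.plot(S.i, S.j, 'ok')
    plt.xlabel('Source neuron index')
    plt.ylabel('Target neuron index')
    plt.show()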
def link_event_to_snn(events, snn):
    rmax = int(numpy.max(snn.r))
    xmax = int(numpy.max(snn.x) + 1)
    ymax = int(numpy.max(snn.y) + 1)
    v_update = 0.2 * brian2.mvolt
    indices = events.indices[:]
    xc = indices % xmax
    yc = indices // xmax
    synapses = brian2.Synapses(events, snn, model='v_update : volt', on_pre='v += v_update')
    for index in range(0, len(indices), 1):
        for r in range(4, rmax, 4):
            x0 = xc[index]
            y0 = yc[index]
            if (x0 + r < xmax and x0 - r >= 0 and y0 + r < ymax and y0 - r >= 0):
                x, y = solve_centers(x0, y0, r, xmax, ymax)
                #print(rmax*x+xmax*rmax*y+r-1)
                synapses.connect(i=index, j=(r // 4 - 1) * xmax * ymax + x * ymax + y)
                #print(x, y, xc[index], yc[index], r)
    N = len(synapses)
    synapses.v_update = v_update * numpy.ones((1, N))
    return synapses
def main1():
    # adding different weights per synapse
    # example: a distance-dependent connectivity function, which matters for many neurons
    # whose inhibitory/excitatory connections get weaker as the distance grows
    bs.start_scope()
    n_neurons = 30
    neuron_spacing = 50 * bs.umetre
    width = n_neurons / 4.0 * neuron_spacing
    G = bs.NeuronGroup(n_neurons, 'x:metre')
    G.x = 'i*neuron_spacing'

    # All synapses are connected (excluding self-connections)
    S = bs.Synapses(G, G, 'w:1')
    S.connect(condition='i!=j')
    # basically, any variable you use in the definition of equations is usable as an actual
    # variable in code and vice versa; therefore, even if the variable width is labelled as
    # not being used, it actually is
    S.w = 'exp(-(x_pre-x_post)**2/(2*width**2))'
    # visualise_connectivity(S)
    plt.clf()
    plt.scatter(S.x_pre / bs.um, S.x_post / bs.um, S.w * 20)
    plt.xlabel('Source neuron position (um)')
    plt.ylabel('Target neuron position (um)')
    plt.show()
def simulate_WORM_neuron(input_current,
                         simulation_time=5 * b2.ms,
                         v_leak=V_LEAK,
                         g_leak=G_LEAK,
                         c_m=C_M,
                         rest_pot=R_POT,
                         tau=MEMBRANE_TIME_SCALE,
                         f_t=FIRING_THRESHOLD):
    # differential equation of neuron model
    eqs = """
    dv/dt = ( g_leak * (v_leak - v) + input_current(t,i) ) / c_m : volt
    """
    # LIF neuron using Brian2 library
    neuron = b2.NeuronGroup(2, model=eqs, threshold='v>f_t', method="linear")
    neuron.v = rest_pot  # set initial value
    # monitoring membrane potential of neuron and injecting current
    state_monitor = b2.StateMonitor(neuron, ["v"], record=True)
    S = b2.Synapses(neuron, neuron, model='w : volt', on_pre='v += w')
    S.connect(i=0, j=1)
    S.w = 0.01 * b2.mV
    # run the simulation
    b2.run(simulation_time)
    return state_monitor
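# simulate_WORM_neuron expects `input_current` to be a 2D TimedArray (time x neuron) with
# units of current, since it is evaluated as input_current(t, i) inside the equations.
# A minimal, assumed way to build a step current that drives only neuron 0; the amplitude
# and duration are placeholders, not values from the original model:
import numpy as np
import brian2 as b2

def make_step_input(amplitude=2 * b2.pA, duration=5 * b2.ms):
    n_steps = int(duration / b2.defaultclock.dt)
    values = np.zeros((n_steps, 2))   # two neurons, matching the NeuronGroup above
    values[:, 0] = 1.0                # inject only into neuron 0
    return b2.TimedArray(values * amplitude, dt=b2.defaultclock.dt)

# assumed usage: state_monitor = simulate_WORM_neuron(make_step_input())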
def visualisation_synapses(source, target, connectivity):
    pre = 'v_post += 1*mV'
    synapses = b2.Synapses(source=source, target=target, on_pre=pre)
    synapses.connect(connectivity)
    return synapses
def main6():
    # connecting only the neighbouring neurons
    bs.start_scope()
    n_neurons = 10
    G = bs.NeuronGroup(n_neurons, 'v:1')
    S = bs.Synapses(G, G)
    # connect only if the neurons are less than 4 indices apart and are not the same neuron
    S.connect(condition='abs(i-j)<4 and i!=j')
    visualise_connectivity(S)
def main7():
    # using generator syntax to create connections
    bs.start_scope()
    n_neurons = 10
    G = bs.NeuronGroup(n_neurons, 'v:1')
    S = bs.Synapses(G, G)
    # connect only if the neurons are less than 4 indices apart and are not the same neuron;
    # skip_if_invalid is needed here since, near the edges, i+4 goes out of bounds and would cause an error
    S.connect(j='k for k in range(i-3, i+4) if i != k', skip_if_invalid=True)
    visualise_connectivity(S)
def main4(plot=True):
    # what if we want to keep the input spikes exactly the same across different taus?
    # Solution: run the PoissonGroup once, store all the spikes, and reuse the stored spikes across the multiple runs
    bs.start_scope()
    num_inputs = 100
    input_rate = 10 * bs.Hz
    w = 0.1
    tau_range = bs.linspace(1, 10, 30) * bs.ms
    output_rates = []

    P = bs.PoissonGroup(num_inputs, rates=input_rate)
    p_monitor = bs.SpikeMonitor(P)

    one_second = 1 * bs.second
    """
    Note that in the code above, we created Network objects. The reason is that in the loop,
    if we just called run it would try to simulate all the objects, including the Poisson
    neurons P, and we only want to run that once. We use Network to specify explicitly which
    objects we want to include.
    """
    net = bs.Network(P, p_monitor)
    net.run(one_second)

    # keep a copy of the spikes generated by the PoissonGroup during that explicit run
    spikes_i = p_monitor.i
    spikes_t = p_monitor.t

    # Construct the network that we run each time
    sgg = bs.SpikeGeneratorGroup(num_inputs, spikes_i, spikes_t)
    eqs = '''
    dv/dt = -v/tau : 1
    '''
    G = bs.NeuronGroup(1, eqs, threshold='v>1', reset='v=0', method='exact')
    S = bs.Synapses(sgg, G, on_pre='v += w')
    S.connect()  # fully connected
    g_monitor = bs.SpikeMonitor(G)

    # store the current state of the network
    net = bs.Network(sgg, G, S, g_monitor)
    net.store()

    for tau in tau_range:
        net.restore()
        net.run(one_second)
        output_rates.append(g_monitor.num_spikes / bs.second)
    if plot:
        plt.clf()
        plt.plot(tau_range / bs.ms, output_rates)
        plt.xlabel(r'$\tau$ (ms)')
        plt.ylabel('Firing rate (spikes/s)')
        # there is much less noise compared to before, where we used a different PoissonGroup every time
        plt.show()
def main5():
    # the effect of probability in connections
    bs.start_scope()
    n_neurons = 10
    G = bs.NeuronGroup(n_neurons, model='v:1')
    for p in [0.1, 0.5, 1.0]:
        S = bs.Synapses(G, G)
        # connect neurons that do not have the same index with probability p
        S.connect(condition='i!=j', p=p)
        visualise_connectivity(S)
        plt.suptitle('p = ' + str(p))
def main2():
    # setting synaptic weight
    bs.start_scope()
    n_neurons = 3
    eqs = '''
    dv/dt = (I-v)/tau : 1 (unless refractory)
    I : 1
    tau : second
    '''
    threshold = 'v>1'
    reset = 'v = 0'
    refractory = 10 * bs.ms
    G = bs.NeuronGroup(N=n_neurons, model=eqs, threshold=threshold, reset=reset,
                       method='exact', refractory=refractory)
    G.I = [2, 0, 0]  # represents the I in eqs
    # the driving current should be bigger than the threshold, otherwise it won't spike at all
    # I = 0 means that the voltage won't change since there is no driving current
    G.tau = [10, 100, 100] * bs.ms  # represents the tau in eqs
    # unlike last time, tau is defined per neuron so we see the effects of different values

    # model : `str`, `Equations`, optional
    #     The model equations for the synapses.
    # you need to pass this as model= in Synapses in order to incorporate a weight
    synapse_model = 'w : 1'
    # So in total, what this model says is that whenever two neurons in G are connected by a synapse,
    # when the source neuron fires a spike the target neuron will have its value of v increased by w.
    S = bs.Synapses(source=G, target=G, model=synapse_model, on_pre='v_post += w')
    S.connect(i=0, j=[1, 2])
    # This gives a synaptic connection from 0 to 1 with weight 0.2 = 0.2*1 and from 0 to 2 with weight 0.4 = 0.2*2.
    S.w = 'j*0.2'

    state_monitor = bs.StateMonitor(G, 'v', record=True)
    bs.run(100 * bs.ms)

    plt.plot(state_monitor.t / bs.ms, state_monitor.v[0], label='Neuron 0')
    plt.plot(state_monitor.t / bs.ms, state_monitor.v[1], label='Neuron 1')
    plt.plot(state_monitor.t / bs.ms, state_monitor.v[2], label='Neuron 2')
    plt.xlabel('Time (ms)')
    plt.ylabel('v')
    plt.legend()
    plt.show()
def SetSynapses(neuron_groups, synapse_names):
    #pudb.set_trace()
    #synapse_names = ['Si', 'Sl', [], 'Sb']
    s = 1.0
    N_hidden_layers = len(neuron_groups[1])
    #pudb.set_trace()
    Si = br.Synapses(neuron_groups[0], neuron_groups[1][0], model='w:1', pre='ge+=w*mV',
                     name=synapse_names[0])
    Sl = br.Synapses(neuron_groups[1][-1], neuron_groups[2], model='w:1', pre='ge+=w*mV',
                     name=synapse_names[1])
    #Sa = []
    #Sa.append(br.Synapses(neuron_groups[1][2], neuron_groups[2][0], model='w:1', pre='ge+=w*mV',
    #                      name=synapse_names[2][0]))
    #for i in range(N_hidden_layers - 1):
    #    Sa.append(br.Synapses(neuron_groups[1][i], neuron_groups[1][i+1], model='w:1',
    #                          pre='ge+=w*mV', name=synapse_names[2][i+1]))
    #Sb = br.Synapses(neuron_groups[2][-1], neuron_groups[3], model='w:1', pre='ge+=w*mV',
    #                 name=synapse_names[3])
    synapse_groups = [Si, Sl]
    return synapse_groups
def main3():
    # introducing delay
    bs.start_scope()
    n_neurons = 3
    eqs = '''
    dv/dt = (I-v)/tau : 1 (unless refractory)
    I : 1
    tau : second
    '''
    threshold = 'v>1'
    reset = 'v = 0'
    refractory = 10 * bs.ms
    G = bs.NeuronGroup(N=n_neurons, model=eqs, threshold=threshold, reset=reset,
                       method='exact', refractory=refractory)
    G.I = [2, 0, 0]  # represents the I in eqs
    # the driving current should be bigger than the threshold, otherwise it won't spike at all
    # I = 0 means that the voltage won't change since there is no driving current
    G.tau = [10, 100, 100] * bs.ms  # represents the tau in eqs
    # unlike last time, tau is defined per neuron so we see the effects of different values

    synapse_model = 'w : 1'
    # So in total, what this model says is that whenever two neurons in G are connected by a synapse,
    # when the source neuron fires a spike the target neuron will have its value of v increased by w.
    S = bs.Synapses(source=G, target=G, model=synapse_model, on_pre='v_post += w')
    S.connect(i=0, j=[1, 2])
    S.w = 'j*0.2'
    S.delay = 'j*2*ms'

    state_monitor = bs.StateMonitor(G, 'v', record=True)
    bs.run(100 * bs.ms)

    plt.plot(state_monitor.t / bs.ms, state_monitor.v[0], label='Neuron 0')
    plt.plot(state_monitor.t / bs.ms, state_monitor.v[1], label='Neuron 1')
    plt.plot(state_monitor.t / bs.ms, state_monitor.v[2], label='Neuron 2')
    plt.xlabel('Time (ms)')
    plt.ylabel('v')
    plt.legend()
    plt.show()
def main4():
    # more complex connectivity
    bs.start_scope()
    n_neurons = 3
    tau = 100 * bs.ms
    eqs = '''
    dv/dt = (I-v)/tau : 1 (unless refractory)
    I : 1
    '''
    threshold = 'v>1'
    reset = 'v = 0'
    refractory = 10 * bs.ms
    G = bs.NeuronGroup(N=n_neurons, model=eqs, threshold=threshold, reset=reset,
                       method='exact', refractory=refractory)
    G.I = [2, 0, 0]

    synapse_model = 'w : 1'
    # p : float, str, optional
    #     The probability to create ``n`` synapses wherever the ``condition``
    #     evaluates to true. Cannot be used with generator syntax for ``j``.
    S = bs.Synapses(source=G, target=G, model=synapse_model, on_pre='v_post += w')
    S.connect(condition='i!=j', p=0.5)
    S.w = 'j*0.2'

    state_monitor = bs.StateMonitor(G, 'v', record=True)
    bs.run(100 * bs.ms)

    plt.plot(state_monitor.t / bs.ms, state_monitor.v[0], label='Neuron 0')
    plt.plot(state_monitor.t / bs.ms, state_monitor.v[1], label='Neuron 1')
    plt.plot(state_monitor.t / bs.ms, state_monitor.v[2], label='Neuron 2')
    plt.xlabel('Time (ms)')
    plt.ylabel('v')
    plt.legend()
    plt.show()
    visualise_connectivity(S=S)
def nonplastic_synapses(source, target, connectivity, synapse_type):
    if synapse_type == 'excitatory':
        pre = 'ge_post += w * siemens'
    elif synapse_type == 'inhibitory':
        pre = 'gi_post += w * siemens'
    else:
        raise Exception("Invalid synapse type: %s" % synapse_type)
    model = 'w : 1'
    synapses = b2.Synapses(source=source, target=target, model=model, on_pre=pre)
    synapses.connect(connectivity)
    return synapses
def construct_feedforward_input(NTWK, target_pop, afferent_pop,
                                t, rate_array,
                                verbose=False, SEED=1):
    """
    This generates an input, asynchronous from the post-synaptic neurons, onto the
    post-synaptic population.

    POPS and AFFERENCE_ARRAY should be 1D arrays, as there is only one source population.

    'pop_for_conductance' is the string identifying the source conductance
    that will be incremented by the afferent input !!
    """
    Model = NTWK['Model']

    # extract parameters of the afferent input
    Nsyn = Model['p_' + afferent_pop + '_' + target_pop] * Model['N_' + afferent_pop]
    Qsyn = Model['Q_' + afferent_pop + '_' + target_pop]

    # finding the target pop in the brian2 objects
    ipop = np.argwhere(NTWK['POPULATIONS'] == target_pop).flatten()[0]

    if Nsyn > 0:
        if verbose:
            print('drawing Poisson process for afferent input [...]')
        indices, times = set_spikes_from_time_varying_rate(
            t, rate_array,
            NTWK['POPS'][ipop].N, Nsyn, SEED=(SEED + 2)**2 % 100)
        spikes = brian2.SpikeGeneratorGroup(NTWK['POPS'][ipop].N, indices, times)
        pre_increment = 'G' + afferent_pop + target_pop + ' += w'
        synapse = brian2.Synapses(spikes, NTWK['POPS'][ipop], on_pre=pre_increment,
                                  model='w:siemens')
        synapse.connect('i==j')
        synapse.w = Qsyn * brian2.nS

        NTWK['PRE_SPIKES'].append(spikes)
        NTWK['PRE_SYNAPSES'].append(synapse)
    else:
        print('Nsyn = 0 for', afferent_pop + '_' + target_pop)
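# construct_feedforward_input relies on set_spikes_from_time_varying_rate, which is not
# shown here. The sketch below is only a guess at its contract (not the author's
# implementation): for each time bin of rate_array, each of the N target cells receives
# Nsyn independent afferents firing at that rate, so the expected spike count per bin is
# Nsyn*rate*dt, drawn from a Poisson law and jittered within the bin.
import numpy as np
import brian2

def set_spikes_from_time_varying_rate(t, rate_array, N, Nsyn, SEED=1):
    """Assumes t in ms and rate_array in Hz; returns (indices, times) for a SpikeGeneratorGroup."""
    rng = np.random.RandomState(SEED)
    indices, times = [], []
    dt = (t[1] - t[0]) * 1e-3   # bin width in seconds
    for it, rate in enumerate(rate_array):
        n_spikes = rng.poisson(Nsyn * rate * dt, size=N)   # one draw per target cell
        for neuron, n in enumerate(n_spikes):
            if n > 0:
                indices.extend([neuron] * n)
                # jitter the spikes uniformly within the bin
                times.extend(t[it] + rng.uniform(0, t[1] - t[0], n))
    return np.array(indices, dtype=int), np.array(times) * brian2.ms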
def __init__(self,
             src: NeuronGroup,
             tar: NeuronGroup,
             name: str = '',
             delay: int = 1,
             i: List[int] = None,
             j: List[int] = None,
             w: List[int] = None):
    """
    :param src: Neuron group before the synapse.
    :param tar: Neuron group after the synapse.
    :param name: Name of this synapse.
    :param delay: Number of time steps needed for a spike to transfer from the pre-synaptic
                  neuron to the post-synaptic neuron. It can be 0, 1 or 2.
    :param i: A list of indices of pre-synaptic neurons. Its length must equal the length of j and w.
    :param j: A list of indices of post-synaptic neurons. Its length must equal the length of i and w.
    :param w: A list of weights, one per synaptic connection. Its length must equal the length of i and j.
    """
    # save parameters
    self.src = src
    self.tar = tar
    self.name = name
    self.delay = delay
    self.i = i
    self.j = j
    self.w = w
    # check delay
    if self.delay < 0 or self.delay > 2:
        raise Exception("invalid delay value")
    # check length of i, j, w
    if len(self.i) != len(self.j) or len(self.i) != len(self.w):
        raise Exception("mismatch of length of i, j and w")
    # construct the Brian synapses
    self.brian = b.Synapses(self.src.brian, self.tar.brian, 'w : 1', 'v+=w',
                            dt=self.delay * b.ms)
    self.brian.connect(i=self.i, j=self.j)
    self.brian.w = self.w
def main1():
    # checking the effect of a synaptic connection
    bs.start_scope()
    n_neurons = 2
    eqs = '''
    dv/dt = (I-v)/tau : 1 (unless refractory)
    I : 1
    tau : second
    '''
    threshold = 'v>1'
    reset = 'v = 0'
    refractory = 10 * bs.ms
    G = bs.NeuronGroup(N=n_neurons, model=eqs, threshold=threshold, reset=reset,
                       method='exact', refractory=refractory)
    G.I = [2, 0]  # represents the I in eqs
    # the driving current should be bigger than the threshold, otherwise it won't spike at all
    # I = 0 means that the voltage won't change since there is no driving current
    G.tau = [10, 100] * bs.ms  # represents the tau in eqs
    # unlike last time, tau is defined per neuron so we see the effects of different values

    # So in total, what this model says is that whenever two neurons in G are connected by a synapse,
    # when the source neuron fires a spike the target neuron will have its value of v increased by 0.2.
    S = bs.Synapses(source=G, target=G, on_pre='v_post += 0.2')
    # calling just S.connect() without any parameters connects every source neuron with every
    # target neuron (fully connected); here we only connect neuron 0 to neuron 1
    S.connect(i=0, j=1)
    visualise_connectivity(S)

    state_monitor = bs.StateMonitor(G, 'v', record=True)
    bs.run(100 * bs.ms)

    plt.plot(state_monitor.t / bs.ms, state_monitor.v[0], label='Neuron 0')
    plt.plot(state_monitor.t / bs.ms, state_monitor.v[1], label='Neuron 1')
    plt.xlabel('Time (ms)')
    plt.ylabel('v')
    plt.legend()
    plt.show()
def simulate(tau):
    # These two lines are needed to start a new standalone simulation:
    b2.device.reinit()
    b2.device.activate()

    eqs = '''
    dv/dt = -v/tau : 1
    '''
    net = b2.Network()
    P = b2.PoissonGroup(num_inputs, rates=input_rate)
    G = b2.NeuronGroup(1, eqs, threshold='v>1', reset='v=0', method='euler')
    S = b2.Synapses(P, G, on_pre='v += weight')
    S.connect()
    M = b2.SpikeMonitor(G)
    net.add([P, G, S, M])
    net.run(1000 * b2.ms)
    return M
def make_plot_network(self):
    count_mat = np.zeros((int(self.stimulus_duration / ms * 10), self.number_of_neurons), int)
    target = b2.NeuronGroup(N=1, model=self.eqs, threshold='v>threshold', reset='v=0',
                            namespace={'tau': self.tau, 'threshold': self.threshold})
    driving = b2.SpikeGeneratorGroup(N=self.number_of_neurons, indices=[0], times=[0 * ms])
    synapses = b2.Synapses(source=driving, target=target, model='w: 1', on_pre='v+=w*counts(t, i)')
    synapses.connect(i=range(self.number_of_neurons), j=[0] * self.number_of_neurons)
    synapses.w = self.weights
    spikes = b2.SpikeMonitor(target, record=True)
    voltage = b2.StateMonitor(target, 'v', record=True)
    net = b2.Network([target, driving, synapses, spikes, voltage])
    net.store()
    self.networks['plot'] = dict(net=net,
                                 synapses=synapses,
                                 count_mat=count_mat,
                                 v_mon=voltage,
                                 spike_mon=spikes,
                                 driving=driving)
def build_up_recurrent_connections(NTWK, SEED=1, verbose=False):
    """
    Construct the synapses from the connectivity matrix
    """
    CONN = np.empty((len(NTWK['POPS']), len(NTWK['POPS'])), dtype=object)
    CONN2 = []

    np.random.seed(SEED)

    if verbose:
        print('------------------------------------------------------')
        print('drawing random connections [...]')
        print('------------------------------------------------------')

    for ii, jj in itertools.product(range(len(NTWK['POPS'])), range(len(NTWK['POPS']))):
        if (NTWK['M'][ii, jj]['pconn'] > 0) and (NTWK['M'][ii, jj]['Q'] != 0):
            CONN[ii, jj] = brian2.Synapses(NTWK['POPS'][ii], NTWK['POPS'][jj], model='w:siemens',
                                           on_pre='G' + NTWK['M'][ii, jj]['name'] + '_post+=w')
            # N.B. the following brian2 call:
            #   CONN[ii,jj].connect(p=NTWK['M'][ii,jj]['pconn'], condition='i!=j')
            # does not fix the synaptic numbers, so we draw the connections manually
            N_per_cell = int(NTWK['M'][ii, jj]['pconn'] * NTWK['POPS'][ii].N)
            if ii == jj:
                # need to take care of no autapse
                i_rdms = np.concatenate([
                    np.random.choice(np.delete(np.arange(NTWK['POPS'][ii].N), [iii]), N_per_cell)
                    for iii in range(NTWK['POPS'][jj].N)])
            else:
                i_rdms = np.concatenate([
                    np.random.choice(np.arange(NTWK['POPS'][ii].N), N_per_cell)
                    for jjj in range(NTWK['POPS'][jj].N)])
            j_fixed = np.concatenate([
                np.ones(N_per_cell, dtype=int) * jjj
                for jjj in range(NTWK['POPS'][jj].N)])
            CONN[ii, jj].connect(i=i_rdms, j=j_fixed)
            CONN[ii, jj].w = NTWK['M'][ii, jj]['Q'] * brian2.nS
            CONN2.append(CONN[ii, jj])

    NTWK['REC_SYNAPSES'] = CONN2
def attach_dummy_group(self, pf=.06):
    self.dummy_group = bb.NeuronGroup(500, eqs_exc, threshold='v>-50*mV',
                                      reset='v=-60*mV', refractory=2. * ms)
    self.C_ed = bb.Synapses(self.dummy_group, self.Pe, model='w:siemens', pre='ge+=w')
    for p1 in self.dummy_group:
        for p2 in p_index[n_gr + 1]:
            if np.random.random() < nn.pf_ee:
                self.C_ed[p1, p2] = True
                #self.C_ee[p1,p2].w=self.g_ee
    print('hui')
    #nn.C_ed.connect_random(nn.dummy_group,nn.p_ass_index[0][0],sparseness=pf)
    self.C_ed.w = self.g_ee
    self.C_ed.delay = self.D

    self.network.add(self.dummy_group, self.C_ed)
def random_distance_dependent_connections(NTWK):
    CONN = np.empty((len(NTWK['POPS']), len(NTWK['POPS'])), dtype=object)
    CONN2 = []
    for ii, jj in itertools.product(range(len(NTWK['POPS'])), range(len(NTWK['POPS']))):
        if (NTWK['M'][ii, jj]['pconn'] > 0) and (NTWK['M'][ii, jj]['Q'] != 0):
            CONN[ii, jj] = brian2.Synapses(NTWK['POPS'][ii], NTWK['POPS'][jj], model='w:siemens',
                                           on_pre='G' + NTWK['M'][ii, jj]['name'] + '_post+=w')
            # CONN[ii,jj].connect(p=NTWK['M'][ii,jj]['pconn'], condition='i!=j')
            # N.B. the brian2 call above does weird things (e.g. it creates synchrony),
            # so we draw the connections manually to fix the number of synapses per cell
            N_per_cell = int(NTWK['M'][ii, jj]['pconn'] * NTWK['POPS'][ii].N)
            if ii == jj:
                # need to take care of no autapse
                exclude_self = True
            else:
                exclude_self = False
            I_rdms, Delays = np.empty(0, dtype=int), np.empty(0)
            for index_target_cell in range(NTWK['POPS'][jj].N):
                i_rdms, delays = draw_spatially_dependent_connectivity_profile(
                    index_target_cell,
                    np.arange(NTWK['POPS'][ii].N),
                    N_per_cell,
                    NTWK['M'][ii, jj]['SpatialDecay'],
                    NTWK['M'][ii, jj]['Delay'],
                    exclude_self=exclude_self)
                I_rdms = np.concatenate([I_rdms, i_rdms])
                Delays = np.concatenate([Delays, delays])
            j_fixed = np.concatenate([
                np.ones(N_per_cell, dtype=int) * jjj
                for jjj in range(NTWK['POPS'][jj].N)])
            CONN[ii, jj].connect(i=np.array(I_rdms, dtype=int), j=j_fixed)
            CONN[ii, jj].w = NTWK['M'][ii, jj]['Q'] * brian2.nS
            CONN[ii, jj].delay = Delays * brian2.ms
            CONN2.append(CONN[ii, jj])
    return CONN2
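# random_distance_dependent_connections depends on draw_spatially_dependent_connectivity_profile,
# which is not included here. The sketch below is only a guess at its contract: pick N_per_cell
# presynaptic indices with a probability that decays with the index distance to the target cell
# (decay constant `spatial_decay`, in index units), and return delays that grow with distance on
# top of a fixed base delay `delay0` (in ms). The 0.1 ms-per-index slope is a placeholder.
import numpy as np

def draw_spatially_dependent_connectivity_profile(target_index, candidates, N_per_cell,
                                                  spatial_decay, delay0,
                                                  exclude_self=False):
    if exclude_self:
        candidates = candidates[candidates != target_index]
    distance = np.abs(candidates - target_index)
    proba = np.exp(-distance / spatial_decay)
    proba /= proba.sum()                              # normalize to a probability distribution
    picked = np.random.choice(candidates, N_per_cell, p=proba)
    delays = delay0 + 0.1 * np.abs(picked - target_index)
    return picked, delays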
def main3(plot=True):
    # even faster, but will not always work:
    # since there is only a single output neuron in the model above,
    # we can make multiple output neurons and make the time constant a parameter of the group
    bs.start_scope()
    num_inputs = 100
    input_rate = 10 * bs.Hz
    w = 0.1
    tau_range = bs.linspace(1, 10, 30) * bs.ms
    num_tau = len(tau_range)
    P = bs.PoissonGroup(num_inputs, rates=input_rate)

    eqs = """
    dv/dt = -v/tau : 1
    tau : second
    """
    # set up as many output neurons as the number of taus you want to try
    G = bs.NeuronGroup(num_tau, eqs, threshold='v>1', reset='v=0', method='exact')
    # set the tau for each individual neuron separately
    # (didn't know that this was possible)
    G.tau = tau_range
    S = bs.Synapses(P, G, on_pre='v += w')
    S.connect()
    spike_monitor = bs.SpikeMonitor(G)
    # Now we can just run once with no loop
    bs.run(1 * bs.second)
    # and each count corresponds to a neuron with a different tau
    output_rates = spike_monitor.count / bs.second  # firing rate is count/duration
    if plot:
        plt.plot(tau_range / bs.ms, output_rates)
        plt.xlabel(r'$\tau$ (ms)')
        plt.ylabel('Firing rate (sp/s)')
        plt.show()