def test_network_remove():
    """Objects removed from a Network must no longer be updated by run()."""
    kept = Counter()
    dropped = Counter()
    net = Network(kept, dropped)

    # After removing `dropped`, only `kept` should advance during the run.
    net.remove(dropped)
    net.run(1*ms)
    assert_equal(kept.count, 10)
    assert_equal(dropped.count, 0)

    # the relevance of this test is when we use weakref.proxy objects in
    # Network.objects, we should be able to add and remove these from
    # the Network just as much as the original objects
    for member in list(net.objects):
        net.remove(member)

    # With an empty network, nothing should advance further.
    net.run(1*ms)
    assert_equal(kept.count, 10)
    assert_equal(dropped.count, 0)
def test_network_remove():
    # NOTE(review): this function name duplicates an earlier definition in
    # this file; only this later definition is collected by the test runner.
    counting = Counter()
    removed = Counter()
    net = Network(counting, removed)

    # Removing one object must stop it from being updated by run().
    net.remove(removed)
    net.run(1*ms)
    assert_equal(counting.count, 10)
    assert_equal(removed.count, 0)

    # the relevance of this test is when we use weakref.proxy objects in
    # Network.objects, we should be able to add and remove these from
    # the Network just as much as the original objects
    # TODO: Does this test make sense now that Network does not store weak
    # references by default?
    for member in list(net.objects):
        net.remove(member)

    # Nothing left in the network, so the counters must stay unchanged.
    net.run(1*ms)
    assert_equal(counting.count, 10)
    assert_equal(removed.count, 0)
def test_incorrect_network_use():
    '''Test some wrong uses of `Network` and `MagicNetwork`'''
    # The constructor must reject unknown keyword arguments and
    # positional arguments that are not BrianObjects.
    assert_raises(TypeError,
                  lambda: Network(name='mynet', anotherkwd='does not exist'))
    assert_raises(TypeError, lambda: Network('not a BrianObject'))

    net = Network()
    assert_raises(TypeError, lambda: net.add('not a BrianObject'))

    # MagicNetwork may not be instantiated directly.
    assert_raises(ValueError, lambda: MagicNetwork())

    group = NeuronGroup(10, 'v:1')
    net.add(group)
    assert_raises(TypeError, lambda: net.remove(object()))

    # The global magic_network must not be modified explicitly.
    assert_raises(MagicError, lambda: magic_network.add(group))
    assert_raises(MagicError, lambda: magic_network.remove(group))
def run_task_hierarchical(task_info, taskdir, tempdir):
    """Build, run and analyse one trial of the hierarchical (sensory +
    integration circuit) network in Brian2 C++ standalone mode.

    Parameters
    ----------
    task_info : nested dict with 'simulation', 'stimulus', 'sen', plus
        top-level keys 'bfb', 'c', 'seed', 'targetB'.
    taskdir : output directory for this task; created here, plots go in it.
    tempdir : build directory handed to the cpp_standalone device.

    Returns
    -------
    dict with 'raw_data' (population rates, preference mask, last_muOUd),
    'sim_state' (placeholder) and 'computed' (burst statistics).
    """
    # imports
    from brian2 import defaultclock, set_device, seed, TimedArray, Network, profiling_summary
    from brian2.monitors import SpikeMonitor, PopulationRateMonitor, StateMonitor
    from brian2.synapses import Synapses
    from brian2.core.magic import start_scope
    from brian2.units import second, ms, amp
    from integration_circuit import mk_intcircuit
    from sensory_circuit import mk_sencircuit, mk_sencircuit_2c, mk_sencircuit_2cplastic
    from burstQuant import spks2neurometric
    from scipy import interpolate

    # if you want to put something in the taskdir, you must create it first
    # NOTE(review): os.mkdir raises if taskdir already exists — presumably
    # the scheduler guarantees a fresh directory; confirm against caller.
    os.mkdir(taskdir)
    print(taskdir)

    # parallel code and flag to start
    set_device('cpp_standalone', directory=tempdir)
    #prefs.devices.cpp_standalone.openmp_threads = max_tasks
    start_scope()

    # simulation parameters (trailing-underscore names are unitless floats
    # obtained by dividing out the Brian unit)
    seedcon = task_info['simulation']['seedcon']
    runtime = task_info['simulation']['runtime']
    runtime_ = runtime / second
    settletime = task_info['simulation']['settletime']
    settletime_ = settletime / second
    stimon = task_info['simulation']['stimon']
    stimoff = task_info['simulation']['stimoff']
    stimoff_ = stimoff / second
    stimdur = stimoff - stimon
    smoothwin = task_info['simulation']['smoothwin']
    nummethod = task_info['simulation']['nummethod']

    # -------------------------------------
    # Construct hierarchical network
    # -------------------------------------
    # set connection seed
    seed(seedcon)  # set specific seed to test the same network, this way we also have the same synapses!

    # decision circuit
    Dgroups, Dsynapses, Dsubgroups = mk_intcircuit(task_info)
    decE = Dgroups['DE']
    decI = Dgroups['DI']
    decE1 = Dsubgroups['DE1']
    decE2 = Dsubgroups['DE2']

    # sensory circuit, ff and fb connections
    eps = 0.2  # connection probability (referenced by name in connect(p='eps'))
    d = 1 * ms  # transmission delays of E synapses

    if task_info['simulation']['2cmodel']:
        if task_info['simulation']['plasticdend']:
            # plasticity rule in dendrites --> FB synapses will be removed from the network!
            Sgroups, Ssynapses, Ssubgroups = mk_sencircuit_2cplastic(task_info)
        else:
            # 2c model (Naud)
            Sgroups, Ssynapses, Ssubgroups = mk_sencircuit_2c(task_info)

        senE = Sgroups['soma']
        dend = Sgroups['dend']
        senI = Sgroups['SI']
        senE1 = Ssubgroups['soma1']
        senE2 = Ssubgroups['soma2']
        dend1 = Ssubgroups['dend1']
        dend2 = Ssubgroups['dend2']

        # FB: decision excitatory populations project onto dendrites
        wDS = 0.003  # synaptic weight of FB synapses, 0.0668 nS when scaled by gleakE of sencircuit_2c
        synDE1SE1 = Synapses(decE1, dend1, model='w : 1', method=nummethod,
                             on_pre='x_ea += w', delay=d)
        synDE2SE2 = Synapses(decE2, dend2, model='w : 1', method=nummethod,
                             on_pre='x_ea += w', delay=d)
    else:
        # normal sensory circuit (Wimmer)
        Sgroups, Ssynapses, Ssubgroups = mk_sencircuit(task_info)
        senE = Sgroups['SE']
        senI = Sgroups['SI']
        senE1 = Ssubgroups['SE1']
        senE2 = Ssubgroups['SE2']

        # FB: decision excitatory populations project onto sensory somata
        wDS = 0.004  # synaptic weight of FB synapses, 0.0668 nS when scaled by gleakE of sencircuit
        synDE1SE1 = Synapses(decE1, senE1, model='w : 1', method=nummethod,
                             on_pre='x_ea += w', delay=d)
        synDE2SE2 = Synapses(decE2, senE2, model='w : 1', method=nummethod,
                             on_pre='x_ea += w', delay=d)

    # feedforward synapses from sensory to integration
    wSD = 0.0036  # synaptic weight of FF synapses, 0.09 nS when scaled by gleakE of intcircuit
    synSE1DE1 = Synapses(senE1, decE1, model='w : 1', method=nummethod,
                         on_pre='g_ea += w', delay=d)
    synSE1DE1.connect(p='eps')
    synSE1DE1.w = 'wSD'
    synSE2DE2 = Synapses(senE2, decE2, model='w : 1', method=nummethod,
                         on_pre='g_ea += w', delay=d)
    synSE2DE2.connect(p='eps')
    synSE2DE2.w = 'wSD'

    # feedback synapses from integration to sensory
    b_fb = task_info['bfb']  # feedback strength, between 0 and 6
    wDS *= b_fb  # synaptic weight of FB synapses, 0.0668 nS when scaled by gleakE of sencircuit
    synDE1SE1.connect(p='eps')
    synDE1SE1.w = 'wDS'
    synDE2SE2.connect(p='eps')
    synDE2SE2.w = 'wDS'

    # -------------------------------------
    # Create stimuli
    # -------------------------------------
    if task_info['stimulus']['replicate']:
        # replicated stimuli across iters()
        np.random.seed(task_info['seed'])  # numpy seed for OU process
    else:
        # every trials has different stimuli
        np.random.seed()
    # Note that in standalone we need to specify np seed because it's not
    # taken care with Brian's seed() function!

    if task_info['simulation']['2cmodel']:
        I0 = task_info['stimulus']['I0s']
        last_muOUd = np.loadtxt("last_muOUd.csv")  # save the mean
    else:
        I0 = task_info['stimulus']['I0']  # mean input current for zero-coherence stim
    c = task_info['c']  # stim coherence (between 0 and 1)
    mu1 = task_info['stimulus']['mu1']  # av. additional input current to senE1 at highest coherence (c=1)
    mu2 = task_info['stimulus']['mu2']  # av. additional input current to senE2 at highest coherence (c=1)
    sigma = task_info['stimulus']['sigma']  # amplitude of temporal modulations of stim
    sigmastim = 0.212 * sigma  # std of modulation of stim inputs
    sigmaind = 0.212 * sigma  # std of modulations in individual inputs
    taustim = task_info['stimulus']['taustim']  # correlation time constant of Ornstein-Uhlenbeck process

    # generate stim from OU process
    N_stim = int(senE1.__len__())
    z1, z2, zk1, zk2 = generate_stim(N_stim, stimdur, taustim)

    # stim2exc: common (z) and independent (zk) fluctuations around the mean
    i1 = I0 * (1 + c * mu1 + sigmastim * z1 + sigmaind * zk1)
    i2 = I0 * (1 + c * mu2 + sigmastim * z2 + sigmaind * zk2)
    stim_dt = 1 * ms
    # pad with zeros before stimon and after stimoff so the TimedArray
    # covers the whole runtime
    i1t = np.concatenate((np.zeros((int(stimon / ms), N_stim)), i1.T,
                          np.zeros((int((runtime - stimoff) / stim_dt), N_stim))), axis=0)
    i2t = np.concatenate((np.zeros((int(stimon / ms), N_stim)), i2.T,
                          np.zeros((int((runtime - stimoff) / stim_dt), N_stim))), axis=0)
    # NOTE(review): Irec is not referenced again here — presumably the
    # neuron model equations look it up by name; confirm in circuit builders.
    Irec = TimedArray(np.concatenate((i1t, i2t), axis=1) * amp, dt=stim_dt)

    # -------------------------------------
    # Simulation
    # -------------------------------------
    # set initial conditions (different for evert trial)
    seed()
    decE.g_ea = '0.2 * rand()'
    decI.g_ea = '0.2 * rand()'
    decE.V = '-52*mV + 2*mV * rand()'
    decI.V = '-52*mV + 2*mV * rand()'
    # random initialization near 0, prevent an early decision!
    senE.g_ea = '0.05 * (1 + 0.2*rand())'
    senI.g_ea = '0.05 * (1 + 0.2*rand())'
    senE.V = '-52*mV + 2*mV*rand()'  # random initialization near Vt, avoid initial bump!
    senI.V = '-52*mV + 2*mV*rand()'

    if task_info['simulation']['2cmodel']:
        dend.g_ea = '0.05 * (1 + 0.2*rand())'
        dend.V_d = '-72*mV + 2*mV*rand()'
        dend.muOUd = np.tile(last_muOUd, 2) * amp

    # create monitors
    rateDE1 = PopulationRateMonitor(decE1)
    rateDE2 = PopulationRateMonitor(decE2)
    rateSE1 = PopulationRateMonitor(senE1)
    rateSE2 = PopulationRateMonitor(senE2)
    subSE = int(senE1.__len__())
    spksSE = SpikeMonitor(senE[subSE - 100:subSE + 100])  # last 100 of SE1 and first 100 of SE2

    # construct network
    net = Network(Dgroups.values(), Dsynapses.values(),
                  Sgroups.values(), Ssynapses.values(),
                  synSE1DE1, synSE2DE2, synDE1SE1, synDE2SE2,
                  rateDE1, rateDE2, rateSE1, rateSE2,
                  spksSE, name='hierarchicalnet')

    # create more monitors for plot
    if task_info['simulation']['pltfig1']:
        # inh
        rateDI = PopulationRateMonitor(decI)
        rateSI = PopulationRateMonitor(senI)

        # spk monitors (spksSE is rebound to monitor the full senE group)
        subDE = int(decE1.__len__() * 2)
        spksDE = SpikeMonitor(decE[:subDE])
        spksSE = SpikeMonitor(senE)

        # state mons no more, just the arrays
        stim1 = i1t.T
        stim2 = i2t.T
        stimtime = np.linspace(0, runtime_, stim1.shape[1])

        # construct network (replaces the one built above)
        net = Network(Dgroups.values(), Dsynapses.values(),
                      Sgroups.values(), Ssynapses.values(),
                      synSE1DE1, synSE2DE2, synDE1SE1, synDE2SE2,
                      spksDE, rateDE1, rateDE2, rateDI,
                      spksSE, rateSE1, rateSE2, rateSI,
                      name='hierarchicalnet')

    if task_info['simulation']['plasticdend']:
        # create state monitor to follow muOUd and add it to the networks
        # NOTE(review): dend1 only exists in the 2cmodel branch — this block
        # presumably requires 2cmodel+plasticdend together; confirm configs.
        dend_mon = StateMonitor(dend1, variables=['muOUd', 'Ibg', 'g_ea', 'B'],
                                record=True, dt=1 * ms)
        net.add(dend_mon)

        # remove FB synapses!
        net.remove([synDE1SE1, synDE2SE2, Dsynapses.values()])
        print(
            " FB synapses and synapses of decision circuit are ignored in this simulation!"
        )

    # run hierarchical net
    net.run(runtime, report='stdout', profile=True)
    print(profiling_summary(net=net, show=10))

    # nice plots on cluster
    if task_info['simulation']['pltfig1']:
        plot_fig1b([
            rateDE1, rateDE2, rateDI, spksDE, rateSE1, rateSE2, rateSI,
            spksSE, stim1, stim2, stimtime
        ], smoothwin, taskdir)

    # -------------------------------------
    # Burst quantification
    # -------------------------------------
    # placeholders returned unchanged when burstanalysis is off
    events = np.zeros(1)
    bursts = np.zeros(1)
    singles = np.zeros(1)
    spikes = np.zeros(1)
    last_muOUd = np.zeros(1)

    # neurometric params
    dt = spksSE.clock.dt  # NOTE(review): dt is never used below
    validburst = task_info['sen']['2c']['validburst']
    smoothwin_ = smoothwin / second

    if task_info['simulation']['burstanalysis']:

        if task_info['simulation']['2cmodel']:
            # mean dendritic OU drive over the last 1000 recorded samples
            last_muOUd = np.array(dend_mon.muOUd[:, -int(1e3):].mean(axis=1))

        if task_info['simulation']['plasticdend']:
            # calculate neurometric info per population
            events, bursts, singles, spikes, isis = spks2neurometric(
                spksSE, runtime, settletime, validburst,
                smoothwin=smoothwin_, raster=False)

            # plot & save weigths after convergence
            eta0 = task_info['sen']['2c']['eta0']
            tauB = task_info['sen']['2c']['tauB']
            targetB = task_info['targetB']
            B0 = tauB * targetB
            tau_update = task_info['sen']['2c']['tau_update']
            eta = eta0 * tau_update / tauB
            plot_weights(dend_mon, events, bursts, spikes,
                         [targetB, B0, eta, tauB, tau_update, smoothwin_],
                         taskdir)
            plot_rasters(spksSE, bursts, targetB, isis, runtime_, taskdir)
        else:
            # calculate neurometric per neuron
            events, bursts, singles, spikes, isis = spks2neurometric(
                spksSE, runtime, settletime, validburst,
                smoothwin=smoothwin_, raster=True)
            plot_neurometric(events, bursts, spikes, stim1, stim2, stimtime,
                             (settletime_, runtime_), taskdir, smoothwin_)
            plot_isis(isis, bursts, events, (settletime_, runtime_), taskdir)

    # -------------------------------------
    # Choice selection
    # -------------------------------------
    # population rates and downsample
    originaltime = rateDE1.t / second
    # NOTE(review): passing a float as `num` to np.linspace is rejected by
    # modern NumPy — should be int(originaltime[-1] * 100); confirm version.
    interptime = np.linspace(0, originaltime[-1],
                             originaltime[-1] * 100)  # every 10 ms
    fDE1 = interpolate.interp1d(
        originaltime, rateDE1.smooth_rate(window='flat', width=smoothwin))
    fDE2 = interpolate.interp1d(
        originaltime, rateDE2.smooth_rate(window='flat', width=smoothwin))
    fSE1 = interpolate.interp1d(
        originaltime, rateSE1.smooth_rate(window='flat', width=smoothwin))
    fSE2 = interpolate.interp1d(
        originaltime, rateSE2.smooth_rate(window='flat', width=smoothwin))
    rateDE = np.array([f(interptime) for f in [fDE1, fDE2]])
    rateSE = np.array([f(interptime) for f in [fSE1, fSE2]])

    # select the last half second of the stimulus
    newdt = runtime_ / rateDE.shape[1]
    settletimeidx = int(settletime_ / newdt)
    dec_ival = np.array([(stimoff_ - 0.5) / newdt, stimoff_ / newdt], dtype=int)
    who_wins = rateDE[:, dec_ival[0]:dec_ival[1]].mean(axis=1)

    # divide trls into preferred and non-preferred
    pref_msk = np.argmax(who_wins)
    # NOTE(review): the ~pref_msk trick (~0 == -1, ~1 == -2) selects "the
    # other row" only because there are exactly two rows.
    poprates_dec = np.array([rateDE[pref_msk], rateDE[~pref_msk]])  # 0: pref, 1: npref
    poprates_sen = np.array([rateSE[pref_msk], rateSE[~pref_msk]])

    # NOTE(review): `isis` is only bound inside the burstanalysis branch —
    # np.array(isis) raises NameError when burstanalysis is False; confirm
    # whether that configuration is ever run.
    results = {
        'raw_data': {
            'poprates_dec': poprates_dec[:, settletimeidx:],
            'poprates_sen': poprates_sen[:, settletimeidx:],
            'pref_msk': np.array([pref_msk]),
            'last_muOUd': last_muOUd
        },
        'sim_state': np.zeros(1),
        'computed': {
            'events': events,
            'bursts': bursts,
            'singles': singles,
            'spikes': spikes,
            'isis': np.array(isis)
        }
    }

    return results