Example #1
def runSim():
    f.pc.barrier()
    timing('start', 'runTime')
    if f.rank == 0:
        print('\nRunning...')
        runstart = time() # See how long the run takes
    h.dt = f.cfg['dt']  # set time step
    for key,val in f.cfg['hParams'].items(): setattr(h, key, val) # set other h global vars (celsius, clamp_resist)
    f.pc.set_maxstep(10)
    mindelay = f.pc.allreduce(f.pc.set_maxstep(10), 2) # flag 2 returns minimum value
    if f.rank==0 and f.cfg['verbose']: print('Minimum delay (time-step for queue exchange) is', mindelay)
    
    # reset all netstims so runs are always equivalent
    for cell in f.net.cells:
        for stim in cell.stims:
            if 'hRandom' in stim:
                stim['hRandom'].Random123(cell.gid, f.sim.id32('%d'%(stim['seed'])))
                stim['hRandom'].negexp(1)

    init()
    f.pc.psolve(f.cfg['duration'])
    if f.rank==0: 
        runtime = time()-runstart # See how long it took
        print('  Done; run time = %0.2f s; real-time ratio: %0.2f.' % (runtime, f.cfg['duration']/1000/runtime))
    f.pc.barrier() # Wait for all hosts to get to this point
    timing('stop', 'runTime')
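# The reset loop above pins every NetStim to a dedicated Random123 stream so
# repeated runs draw identical spike trains. A minimal, self-contained sketch
# of that idiom (the gid/seed values and parameters are illustrative
# assumptions, not netpyne's):
from neuron import h

gid, seed = 0, 1              # hypothetical cell id and stimulation seed
ns = h.NetStim()              # spike generator
ns.interval = 10              # mean inter-spike interval (ms)
ns.number = 1e9               # effectively unlimited spikes
ns.noise = 1                  # fully random (negexp) intervals

rng = h.Random()
rng.Random123(gid, seed)      # reproducible, stream-indexed generator
rng.negexp(1)                 # NetStim expects a negexp(1) distribution
ns.noiseFromRandom(rng)       # drive this NetStim from its own stream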
Example #2
    def go(self, sim_time=None):
        """
        Launch a simulation of a given time

        Parameters
        ----------
        sim_time: an integer
            the time in millisecond of the simulation
            it replaces the self.sim_time if defined

        Comments
        --------
        It seems that when multiple go method are done it does not
        change the output vector.
        """

        h.t = 0
        #Start recording
        self.set_recording()
        h.dt = self.dt
        self.cell.initialise()
        init()
        #while h.t < self.sim_time: #I was using this procedure before do not know which one is better
        #    h.fadvance()
        if sim_time:
            run(sim_time)
        else:
            run(self.sim_time)
Example #3
    def _sim_one(self, ps, rng):
        """
        Run the simulation for one setting of parameters.
        """

        # set parameters
        h.IN.soma[0](0.5).g_pas = ps[0]  # g_leak
        h.IN.soma[0](0.5).gnabar_hh2 = ps[1]  # gbar_Na
        h.IN.soma[0](0.5).gkbar_hh2 = ps[2]  # gbar_K
        h.IN.soma[0](0.5).gkbar_im = ps[3]  # gbar_M
        h.IN.soma[0](0.5).e_pas = -ps[4]  # E_leak
        h.IN.soma[0](0.5).ena = ps[5]  # E_Na
        h.IN.soma[0](0.5).ek = -ps[6]  # E_K
        h.IN.soma[0](0.5).vtraub_hh2 = -ps[7]  # V_T
        h.IN.soma[0](0.5).kbetan1_hh2 = ps[8]  # k_betan1
        h.IN.soma[0](0.5).kbetan2_hh2 = ps[9]  # k_betan2
        h.taumax_im = ps[10]  # tau_max
        sigma = ps[11]  # sigma

        # set up current injection of noise
        Iinj = rng.normal(0.5, sigma, np.array(h.t_vec).size)
        Iinj_vec = h.Vector(Iinj)
        Iinj_vec.play(h.El._ref_amp, h.t_vec)

        # initialize and run
        neuron.init()
        h.finitialize(h.v_init)
        neuron.run(h.tstop)

        self.n_sims += 1

        return np.array(h.v_vec)
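# The example above plays a precomputed noise vector into a current source
# with Vector.play. A self-contained sketch of the same technique, assuming a
# plain Hodgkin-Huxley soma and an IClamp instead of the h.IN / h.El model:
import numpy as np
from neuron import h
h.load_file('stdrun.hoc')              # provides h.run() and h.tstop

soma = h.Section(name='soma')
soma.insert('hh')

stim = h.IClamp(soma(0.5))
stim.delay, stim.dur = 0, 1e9          # the amplitude is played in below

h.dt = 0.025
h.tstop = 200.0
t = np.arange(0, h.tstop + h.dt, h.dt)
i_noise = np.random.normal(0.1, 0.05, t.size)   # nA

t_vec = h.Vector(t)
i_vec = h.Vector(i_noise)
i_vec.play(stim._ref_amp, t_vec, 1)    # 1 = interpolate between samples

h.finitialize(-65)
h.run()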
Example #4
def run_simulation(record_site, stim, simulation_time=5000, noise_amplitude=0):
    rec_t, rec_v, rec_ca = record(record_site)
    cvode = nrn.CVode()

    if noise_amplitude == 0:
        cvode.active(1)

        nrn.finitialize(-60)
        neuron.init()

        neuron.run(simulation_time)

    else:
        cvode.active(0)

        nrn.dt = 0.25
        nrn.finitialize(-60)
        neuron.init()

        n_steps = int(np.ceil(simulation_time / nrn.dt)) + 1
        noise = noise_amplitude * np.random.normal(size=n_steps) / np.sqrt(
            nrn.dt)

        # Add noise
        i = 0
        while nrn.t < simulation_time:
            stim.amp = noise[i]
            nrn.fadvance()

            i += 1

    return np.array(rec_t), np.array(rec_v), np.array(rec_ca)
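# The noise branch above sets the stimulus amplitude by hand at every fixed
# time step. A self-contained sketch of that loop, with the recording and the
# current clamp created explicitly (the passive soma is an assumption made so
# the snippet runs on its own):
import numpy as np
from neuron import h

soma = h.Section(name='soma')
soma.insert('pas')

stim = h.IClamp(soma(0.5))
stim.delay, stim.dur = 0, 1e9          # amplitude is updated step by step

rec_t, rec_v = h.Vector(), h.Vector()
rec_t.record(h._ref_t)
rec_v.record(soma(0.5)._ref_v)

simulation_time = 100                  # ms
noise_amplitude = 0.01                 # nA
h.dt = 0.25
n_steps = int(np.ceil(simulation_time / h.dt)) + 1
noise = noise_amplitude * np.random.normal(size=n_steps) / np.sqrt(h.dt)

h.finitialize(-65)
i = 0
while h.t < simulation_time:
    stim.amp = noise[i]                # new current sample each step
    h.fadvance()
    i += 1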
Example #5
def runSimWithIntervalFunc(interval, func):
    f.pc.barrier()
    timing('start', 'runTime')
    if f.rank == 0:
        print('\nRunning...')
        runstart = time() # See how long the run takes
    h.dt = f.cfg['dt']
    f.pc.set_maxstep(10)
    mindelay = f.pc.allreduce(f.pc.set_maxstep(10), 2) # flag 2 returns minimum value
    if f.rank==0 and f.cfg['verbose']: print('Minimum delay (time-step for queue exchange) is', mindelay)
    
    # reset all netstims so runs are always equivalent
    for cell in f.net.cells:
        for stim in cell.stims:
            stim['hRandom'].Random123(cell.gid, f.sim.id32('%d'%(f.cfg['seeds']['stim'])))
            stim['hRandom'].negexp(1)

    init()

    #progUpdate = 1000  # update every second
    while round(h.t) < f.cfg['duration']:
        f.pc.psolve(min(f.cfg['duration'], h.t+interval))
        #if f.cfg['verbose'] and (round(h.t) % progUpdate):
            #print(' Sim time: %0.1f s (%d %%)' % (h.t/1e3, int(h.t/f.cfg['duration']*100)))
        func(h.t) # function to be called at intervals

    if f.rank==0: 
        runtime = time()-runstart # See how long it took
        print('  Done; run time = %0.2f s; real-time ratio: %0.2f.' % (runtime, f.cfg['duration']/1000/runtime))
    f.pc.barrier() # Wait for all hosts to get to this point
    timing('stop', 'runTime')
Example #6
def run(duration, anfs):
    """Run a simulation of spiral ganglion neurons.

    This function takes care of proper acoustic and electric
    initialization.

    Parameters
    ----------
    duration : float
        Duration of the simulation in seconds.
    anfs : list of sg.ANF objects
        List of sg.ANF objects for initialization.

    """
    for anf in anfs:
        if anf.electrodes:
            anf.einit()

    neuron.init()

    for anf in anfs:
        if len(anf.vesicles) > 0:
            anf.ainit()

    neuron.run(duration * 1e3)      # s -> ms
Example #7
 def initialize(self):
     self.records = self.build_records(self.soma)
     
     assign_hoc_globals(self.settings)
     
     neuron.h.dt = self.dt
     neuron.h.finitialize(self.settings['v_init'])
     neuron.init()
Example #8
def run(cmd):
    global h, t, soma, ic, vc, syn, icRec, vcRec, vcrs
    icRec.clear()
    vcRec.clear()

    dt = cmd['dt'] * 1e3  ## convert s -> ms
    h.dt = dt
    data = cmd['data']
    mode = cmd['mode']
    #print "data:", data.min(), data.max()

    #times = h.Vector(np.linspace(h.t, h.t+len(data)*dt, len(data)))
    #print "times:", times.min(), times.max()
    if mode == 'ic':
        #ic.delay = h.t
        ic.delay = 0
        vc.rs = 1e9
        im = h.Vector(data * 1e9)
        im.play(ic._ref_amp, dt)

    elif mode == 'vc':
        #vc.amp1 = data[0]
        vc.rs = vcrs
        ic.delay = 1e9
        #vc.dur1 = h.t
        vm = h.Vector(data * 1e3)
        vm.play(vc._ref_amp1, dt)

        syn.onset = 400.  #ms
        syn.tau = 1.5  # ms
        syn.gmax = 0.04  # umho
        syn.e = -7.0  # mV
        #syn.i	---	nA

    else:
        sys.stderr.write("Unknown mode '%s'" % mode)
        raise Exception("Unknown mode '%s'" % mode)

    #t2 = t + dt * (len(data)+2)
    #print "run until:", t2
    neuron.init()
    h.finitialize(-65.)
    tstop = (dt * len(data) + 2)
    neuron.run(tstop)  #dt * (len(data)+2))
    #neuron.run(t2)
    #t = t2

    #print len(out), out
    #out = np.array(out)[:len(data)]

    if mode == 'ic':
        out = np.array(icRec)[:len(data)] * 1e-3 + np.random.normal(
            size=len(data), scale=0.3e-3)
    elif mode == 'vc':
        out = np.array(vcRec)[:len(data)] * 1e-9 + np.random.normal(
            size=len(data), scale=3.e-12)
    return out
Example #9
 def run(self, sim_time=None):
     self.set_recording()
     neuron.h.dt = self.dt
     neuron.h.finitialize(self.cell.E)
     neuron.init()
     if sim_time:
         neuron.run(sim_time)
     else:
         neuron.run(self.sim_time)
     self.run_already = True
Example #10
 def go(self, sim_time=None):
   self.set_recording()
   neuron.h.dt = self.dt
   neuron.h.finitialize(self.cell.E)
   neuron.init()
   if sim_time:
     neuron.run(sim_time)
   else:
     neuron.run(self.sim_time)
   self.go_already = True
Example #11
	def evaluate(self, p, sim, plt):
		""" gainbias drug manipulation """
		if p.neuron_type != 'Bioneuron':
			# Scale the gains and biases
			self.wm.gain = sim.data[self.wm].gain * p.drug_gainbias[0]
			self.wm.bias = sim.data[self.wm].bias * p.drug_gainbias[1]
			# Set the solver of each connection out of wm to a ProxySolver.
			# This prevents the builder from calculating new optimal decoders
			# for the new gain/bias values,
			# which would effectively 'adapt' the network to the drug stimulation
			self.wm_recurrent.solver = ProxySolver(
				sim.model.params[self.wm_recurrent].weights)
			self.wm_to_decision.solver = ProxySolver(
				sim.model.params[self.wm_to_decision].weights)
			# Rebuild the network to affect the gain/bias change
			sim = nengo.Simulator(self.network,
				seed=p.seed, dt=p.dt)  # , progress_bar=False
		else:
			# Apply the HCN channel opening/closing by manipulating g_HCN (gbar_ih in bahl.hoc)
			for nrn in sim.data[self.wm.neurons]:
				for seg in range(nrn.cell.apical.nseg):
					loc = 1.0 * seg / nrn.cell.apical.nseg  # 0.0 to 1.0
					nrn.cell.apical(loc).gbar_ih *= p.drug_biophysical
				for seg in range(nrn.cell.basal.nseg):
					loc = 1.0 * seg / nrn.cell.basal.nseg  # 0.0 to 1.0
					nrn.cell.basal(loc).gbar_ih *= p.drug_biophysical
				for seg in range(nrn.cell.tuft.nseg):
					loc = 1.0 * seg / nrn.cell.tuft.nseg  # 0.0 to 1.0
					nrn.cell.tuft(loc).gbar_ih *= p.drug_biophysical
				# nrn.cell.soma(0.5).gbar_nat *= p.drug_biophysical
			neuron.init()

		print('Running Trial...')
		sim.run(p.t_cue+p.t_delay)

		if p.neuron_type == 'LIF':
			wm_data = sim.data[self.probe_wm]
		elif p.neuron_type == 'Bioneuron':
			lpf = nengo.Lowpass(p.tau_wm)
			act_bio = lpf.filt(sim.data[self.probe_spikes], dt=p.dt_sample)
			# wm_data = np.dot(act_bio, self.wm_recurrent.solver.decoders_bio)
			wm_data = np.dot(act_bio, self.d_readout)
			# cheaty way
			# oracle_solver = nengo.solvers.LstsqL2(reg=0.01)
			# decoders_bio_new = oracle_solver(self.act_bio, self.target)[0]
			# wm_data = np.dot(act_bio, decoders_bio_new)
			# wm_data = self.target  # plot ideal value to make sure decoding has correct target

		return dict(
			time=np.arange(p.dt, p.t_cue+p.t_delay, p.dt_sample),
			wm=wm_data,
			output=sim.data[self.probe_output],
			spikes=sim.data[self.probe_spikes],
			encoder=sim.data[self.wm].encoders
			)
Example #12
    def __init__(self, spikes, connections):
        self.spikes = spikes

        self.connections = connections

        self.reads = [spikes]
        self.updates = []
        self.sets = []
        self.incs = []

        neuron.init()
Example #13
    def __init__(self, spikes, connections):
        self.spikes = spikes

        self.connections = connections

        self.reads = [spikes]
        self.updates = []
        self.sets = []
        self.incs = []

        neuron.init()
Example #14
 def run(self):
     if self.verbose: print("Initializing Simulation")
     self.set_stim()
     neuron.h.dt = self.dt
     neuron.h.celsius = 36
     neuron.h.finitialize(-60)
     neuron.h.load_file('parcom.hoc')
     neuron.init()
     if self.verbose: print("...Running Simulation")
     neuron.run(self.sim_time)
     if self.verbose: print("...Simulation Complete\n")
Example #15
    def __init__(self, neurons, J, output, voltage):
        self.neurons = neurons
        self.J = J
        self.output = output
        self.voltage = voltage

        self.reads = [J]
        self.sets = [output, voltage]
        self.updates = []
        self.incs = []

        self.cells = [self.neurons.create() for i in range(len(self.J))]
        neuron.init()
Example #16
    def __init__(self, neurons, J, output, voltage):
        self.neurons = neurons
        self.J = J
        self.output = output
        self.voltage = voltage

        self.reads = [J]
        self.sets = [output, voltage]
        self.updates = []
        self.incs = []

        self.cells = [self.neurons.create() for i in range(self.J.shape[0])]
        neuron.init()
Example #17
def run(duration, objects=None):

    if objects is not None:
        for obj in objects:
            obj.pre_init()

    neuron.init()

    if objects is not None:
        for obj in objects:
            obj.post_init()

    neuron.run(duration * 1000)
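# The run() above expects objects with pre_init()/post_init() hooks that are
# called around neuron.init(). A purely illustrative object that fits this
# protocol (not part of the original code):
from neuron import h

class RecordingHook(object):
    """Set up a voltage recording before init, note the initial value after."""
    def __init__(self, segment):
        self.segment = segment
        self.v = h.Vector()

    def pre_init(self):
        # everything that must exist before neuron.init()
        self.v.record(self.segment._ref_v)

    def post_init(self):
        # everything that depends on the initialized state
        self.v_start = self.segment.v

# hypothetical usage: run(0.1, objects=[RecordingHook(soma(0.5))])  # 0.1 s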
Example #18
 def go(self, simTime=None):
     self.set_recording()
     self.h.celsius = self.T
     print('Temperature = %d' % int(self.h.celsius))
     #        self.h.dt = self.dt
     self.cvode = self.h.CVode()
     self.cvode.active(1)  # enable variable time steps
     self.h.finitialize(self.cell.E)
     neuron.init()
     if simTime:
         neuron.run(simTime)
     else:
         neuron.run(self.simTime)
     self.go_already = True
Example #19
def run(cmd):
    global t, soma, ic, vc, icRec, vcRec, vcrs
    icRec.clear()
    vcRec.clear()
    
    dt = cmd['dt'] * 1e3  ## convert s -> ms
    h.dt = dt
    data = cmd['data']
    mode = cmd['mode']
    #print "data:", data.min(), data.max()

    #times = h.Vector(np.linspace(h.t, h.t+len(data)*dt, len(data)))
    #print "times:", times.min(), times.max()
    if mode == 'ic':
        #ic.delay = h.t
        ic.delay = 0
        vc.rs = 1e9
        im = h.Vector(data * 1e9)
        im.play(ic._ref_amp, dt)

    elif mode == 'vc':
        #vc.amp1 = data[0]
        vc.rs = vcrs
        ic.delay = 1e9
        #vc.dur1 = h.t
        vm = h.Vector(data * 1e3)
        vm.play(vc._ref_amp1, dt)
        
    else:
        sys.stderr.write("Unknown mode '%s'" % mode)
        raise Exception("Unknown mode '%s'" % mode)

    #t2 = t + dt * (len(data)+2)
    #print "run until:", t2
    neuron.init()
    #neuron.finitialize(-65)
    neuron.run(dt * (len(data)+2))
    #neuron.run(t2)
    #t = t2

    #print len(out), out
    #out = np.array(out)[:len(data)]

    if mode == 'ic':
        out = np.array(icRec)[:len(data)] * 1e-3 + np.random.normal(size=len(data), scale=0.3e-3)
    elif mode == 'vc':
        out = np.array(vcRec)[:len(data)] * 1e-9 + np.random.normal(size=len(data), scale=3.e-12)
    
    return out
Example #20
    def go(self, sim_time=None):
        """
        Start the simulation once it's been intialized
        """

        self.set_recording()
        h.dt = self.dt

        h.finitialize(self.v_init)
        neuron.init()
        if sim_time:
            neuron.run(sim_time)
        else:
            neuron.run(self.sim_time)
        self.go_already = True
Example #21
    def go(self, sim_time=None):
        """
        Start the simulation once it's been intialized
        """

        self.set_recording()
        h.dt = self.dt
        
        h.finitialize(self.v_init)
        neuron.init()
        if sim_time:
            neuron.run(sim_time)
        else:
            neuron.run(self.sim_time)
        self.go_already = True
Example #22
def run_simulation(record_site):
    """
    Runs the NEURON simulation
    :param record_site: Where to record membrane potential from. Example: soma(0.5), where 0.5 means 'center',
           0 would mean the start, and 1 the end of the section in question.
    :return: Time and voltage numpy arrays
    """
    rec_t = nrn.Vector()
    rec_t.record(nrn._ref_t)
    rec_v = nrn.Vector()
    rec_v.record(record_site._ref_v)
    neuron.h.dt = 2**-3
    nrn.finitialize(-65)
    neuron.init()
    neuron.run(200)
    return np.array(rec_t), np.array(rec_v)
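# A hedged usage sketch for run_simulation(); the imports, the nrn alias and
# the cell built here are assumptions, since the example only shows the
# function itself:
import neuron
nrn = neuron.h

soma = nrn.Section(name='soma')
soma.L = soma.diam = 20                # um
soma.insert('hh')

stim = nrn.IClamp(soma(0.5))
stim.delay, stim.dur, stim.amp = 20, 100, 0.1   # ms, ms, nA

t, v = run_simulation(soma(0.5))       # record at the centre of the soma

import matplotlib.pyplot as plt
plt.plot(t, v)
plt.xlabel('time (ms)')
plt.ylabel('membrane potential (mV)')
plt.show()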
Example #23
def run_simulation(record_site):
    """
    Runs the NEURON simulation
    :param record_site: Where to record membrane potential from. Example: soma(0.5), where 0.5 means 'center',
           0 would mean the start, and 1 the end of the section in question.
    :return: Time and voltage numpy arrays
    """
    rec_t = nrn.Vector()
    rec_t.record(nrn._ref_t)
    rec_v = nrn.Vector()
    rec_v.record(record_site._ref_v)
    neuron.h.dt = 2**-3
    nrn.finitialize(-65)
    neuron.init()
    neuron.run(200)
    return np.array(rec_t), np.array(rec_v)
Example #24
    def __init__(self, neurons, J, output, voltage, current=None):
        self.neurons = neurons
        self.J = J
        self.output = output
        self.voltage = voltage

        args = [] if current is None else [current]
        self.args = args

        self.reads = [J]
        self.sets = [output, voltage] + args
        self.updates = []
        self.incs = []

        self.cells = [self.neurons.create() for i in range(self.J.shape[0])]
        neuron.init()
Example #25
def runSim():
    f.pc.barrier()
    timing('start', 'runTime')
    if f.rank == 0:
        print('\nRunning...')
        runstart = time() # See how long the run takes
    h.dt = f.cfg['dt']
    f.pc.set_maxstep(10)
    mindelay = f.pc.allreduce(f.pc.set_maxstep(10), 2) # flag 2 returns minimum value
    if f.rank==0 and f.cfg['verbose']: print('Minimum delay (time-step for queue exchange) is', mindelay)
    init()
    f.pc.psolve(f.cfg['duration'])
    if f.rank==0: 
        runtime = time()-runstart # See how long it took
        print('  Done; run time = %0.2f s; real-time ratio: %0.2f.' % (runtime, f.cfg['duration']/1000/runtime))
    f.pc.barrier() # Wait for all hosts to get to this point
    timing('stop', 'runTime')
Example #26
def runSimWithIntervalFunc (interval, func):
    from .. import sim

    sim.pc.barrier()
    sim.timing('start', 'runTime')
    preRun()
    init()
    if sim.rank == 0: print('\nRunning...')

    while round(h.t) < sim.cfg.duration:
        sim.pc.psolve(min(sim.cfg.duration, h.t+interval))
        func(h.t) # function to be called at intervals

    sim.pc.barrier() # Wait for all hosts to get to this point
    sim.timing('stop', 'runTime')
    if sim.rank==0:
        print(('  Done; run time = %0.2f s; real-time ratio: %0.2f.' %
            (sim.timingData['runTime'], sim.cfg.duration/1000/sim.timingData['runTime'])))
Example #27
stim.dur = 3


# You can play with the NEURON-gui by typing
# >>> from neuron import gui
# For an overview (gui menu bar): Tools -> Model View -> 1 real cell -> root...

# Record Time from NEURON (neuron.h._ref_t)
rec_t = neuron.h.Vector()
rec_t.record(neuron.h._ref_t)
# Record Voltage from the center of the soma
rec_v = neuron.h.Vector()
rec_v.record(soma(0.5)._ref_v)

neuron.h.finitialize(-60)
neuron.init()
neuron.run(5)

# Plot the recordings with matplotlib
# ===================================

import matplotlib.pyplot as plt

# get values from NEURON-vector format into Python format
times = [] # Use list to add another trace later.
voltages = []
times.append(list(rec_t)) # alternative to `list(rec_t)`: `numpy.array(rec_t)`
voltages.append(list(rec_v))
# check types by:
# >>> type(rec_t)
# >>> type(times[0])
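# A plausible continuation of the plotting code above; the original example is
# truncated at this point, so the labels and styling below are assumptions:
plt.plot(times[0], voltages[0], label='soma(0.5)')
plt.xlabel('time (ms)')
plt.ylabel('membrane potential (mV)')
plt.legend()
plt.show()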
Example #28
    def run(self, t=20, mode='batch'):
        from neuron import h
        import neuron
        import numpy as np

        h.celsius = 40.
        h.dt = 0.0025
        h.finitialize(-75)
        neuron.init()

        assert mode in {'batch', 'step'}
        if mode == 'batch':
            for p in self.populations:
                assert p.record
            print "run started"
            neuron.run(t)
            print "run ended"
            imem = []
            iloc = []
            for p in self.populations:
                for n in p.neurons:
                    imem_n, iloc_n = n.nodes_imem_loc()
                    imem.append(imem_n)
                    iloc.append(iloc_n)

            for n in self.neurons:
                imem_n, iloc_n = n.nodes_imem_loc()
                imem.append(imem_n)
                iloc.append(iloc_n)

            print "calc potentials started"
            for e in self.electrodes:
                e.calc_fields(iloc, imem)
                e.calc_csd(iloc, imem)
            print "calc potentials ended"

        if mode == 'step':
            from time import time

            counter = 0
            interval = 1

            t0 = time()
            ti = h.t
            print "starting stepwise run"
            for p in self.populations:
                assert not p.record
            #TODO: stepwise initialisation
            # do not record currents over time
            imem = []
            iloc = []

            while h.t < t:
                h.fadvance()
                counter += 1.
                if np.mod(counter, interval) == 0:
                    iloc = []
                    imem.append(np.array([[]]))
                    #rtfactor = (h.t - ti) * 1E-3 / (time() - t0)
                    #print 't = %.0f, realtime factor: %.3f' % (h.t, rtfactor)
                    t0 = time()
                    ti = h.t

                    for p in self.populations:
                        imem_n, iloc_n = p.nodes_imem_loc()
                        imem[-1] = np.append(imem[-1], imem_n)
                        iloc.append(iloc_n)

                    for n in self.neurons:
                        imem_n, iloc_n = n.nodes_imem_loc()
                        imem[-1] = np.append(imem[-1], imem_n)
                        iloc.append(iloc_n)

            imem, iloc = np.vstack(imem), np.vstack(iloc)
            for e in self.electrodes:
                e.calc_fields(iloc, imem.T)
Example #29
rec_v = h.Vector()
#rec_v.record(arms[0][9](0.5)._ref_v)
rec_v.record(muscle(0.5)._ref_v)

rec_ina = h.Vector()
rec_ina.record(muscle(0.5)._ref_ina)

rec_ik = h.Vector()
rec_ik.record(muscle(0.5)._ref_ik)

rec_ica = h.Vector()
rec_ica.record(muscle(0.5)._ref_ica)

h.dt = 0.05
h.finitialize(-70.0)
neuron.init()
sim_time = 500

neuron.run(sim_time)

x = np.array(rec_t)
y = np.array(rec_v)
plt.figure(1)
plt.subplot(411)
plt.plot(x, y)

na = np.array(rec_ina)
plt.subplot(412)
Example #30
def run_simulation(soma, simulation_time=5000, noise_amplitude=0, dt=0.01):
    """
    Runs the NEURON simulation.

    Parameters
    ----------
    soma : neuron.Section
        The soma section. The membrane potential is recorded from its center,
        soma(0.5), where 0.5 means 'center', 0 the start and 1 the end of the
        section.
    simulation_time : {float, int}, optional
        Simulation time in ms. Default is 5000 ms.
    noise_amplitude : float, optional
        The amplitude of the noise added to the model, in nA. If 0, no noise is added.
        Note that the model uses adaptive timesteps if there is no noise,
        and fixed timesteps with dt=0.01 if there is noise. Default is 0.
    dt : float, optional
        Time step of the simulation. Only used when there is noise,
        otherwise adaptive time steps are used. Default is 0.01.

    Returns
    -------
    time : array
        Time array for the simulation.
    voltage : array
        Voltage array for the simulation.
    """
    rec_t, rec_v = record(soma(0.5))

    cvode = nrn.CVode()

    if noise_amplitude == 0:
        cvode.active(1)

        nrn.finitialize(-60)
        neuron.init()

        neuron.run(simulation_time)

    else:
        cvode.active(0)

        noise_stim = insert_current_clamp(soma(0.5), duration=simulation_time)

        nrn.dt = dt
        nrn.finitialize(-60)
        neuron.init()

        n_steps = int(np.ceil(simulation_time / nrn.dt)) + 1
        noise = noise_amplitude * np.random.normal(size=n_steps) / np.sqrt(
            nrn.dt)

        # Add noise
        i = 0
        while nrn.t < simulation_time:
            noise_stim.amp = noise[i]
            nrn.fadvance()

            i += 1

    return np.array(rec_t), np.array(rec_v)
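# The record() and insert_current_clamp() helpers used above are not shown in
# this example; plausible minimal versions, assuming nrn is the usual neuron.h
# alias, could look like this:
def record(record_site):
    """Record time and the membrane potential at a segment."""
    rec_t = nrn.Vector()
    rec_t.record(nrn._ref_t)
    rec_v = nrn.Vector()
    rec_v.record(record_site._ref_v)
    return rec_t, rec_v

def insert_current_clamp(input_site, duration=5000):
    """Attach a current clamp whose amplitude is set during the run."""
    stim = nrn.IClamp(input_site)
    stim.delay = 0
    stim.dur = duration
    stim.amp = 0
    return stim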
Example #31
def build_bias(model, bioensemble, biases, method, bias_gain=5e-5):
    rng = np.random.RandomState(bioensemble.seed)
    neurons_lif = 100
    neurons_bio = bioensemble.n_neurons
    tau = 0.01

    lif = nengo.Ensemble(
            neuron_type=nengo.LIF(),
            dimensions=1,
            n_neurons=neurons_lif,
            seed=bioensemble.seed,
            add_to_container=False)
    model.seeds[lif] = bioensemble.seed  # seeds normally set early in builder
    model.build(lif)  # add to the model
    model.add_op(Copy(Signal(0), model.sig[lif]['in'], inc=True))  # connect input(t)=1
    A = get_activities(model.params[lif],  # grab tuning curve activities
        lif,
        model.params[lif].eval_points)

    # Desired output function Y -- just repeat "bias" m times
    Y = np.tile(biases, (A.shape[0], 1))
    bias_decoders = nengo.solvers.LstsqL2()(A, Y)[0]

    # initialize synaptic locations
    syn_loc = get_synaptic_locations(
        rng,
        neurons_lif,
        neurons_bio,
        n_syn=1)
    syn_weights = np.zeros((
        neurons_bio,
        neurons_lif,
        syn_loc.shape[2]))
    if method == 'weights':
        for b in range(syn_weights.shape[0]):
            syn_weights[b] = rng.uniform(np.max(biases), np.min(biases), size=syn_weights[b].shape)
    if method == 'weights_fixed':
        for b in range(syn_weights.shape[0]):
            syn_weights[b] = bias_gain * biases[b]**5 * np.ones(syn_weights[b].shape)

    # unit test that synapse and weight arrays are compatible shapes
    if not syn_loc.shape[:-1] == bias_decoders.T.shape:
        raise BuildError("Shape mismatch: syn_loc=%s, bias_decoders=%s"
                         % (syn_loc.shape[:-1], bias_decoders.T.shape))

    # add synapses to the bioneurons with weights = bias_decoders
    neurons = model.params[bioensemble.neurons]
    for j, bahl in enumerate(neurons):
        assert isinstance(bahl, Bahl)
        loc = syn_loc[j]
        bahl.synapses[lif] = np.empty(
            (loc.shape[0], loc.shape[1]), dtype=object)
        for pre in range(loc.shape[0]):
            for syn in range(loc.shape[1]):
                # section = bahl.cell.tuft(loc[pre, syn])
                section = bahl.cell.apical(loc[pre, syn])
                # w_ij = np.dot(decoders[pre], gain * encoder)
                if method == 'decode':
                    syn_weights[j, pre, syn] = bias_decoders[pre, j]
                w_ij = syn_weights[j, pre, syn]
                synapse = ExpSyn(section, w_ij, tau, loc[pre, syn])
                bahl.synapses[lif][pre][syn] = synapse
    neuron.init()

    model.add_op(TransmitSpikes(
        lif, bioensemble, None, neurons,
        model.sig[lif]['out'], states=[model.time]))
Example #32
def build_bioneurons(model, neuron_type, neurons):
    ens = neurons.ensemble
    bias_method = neuron_type.bias_method
    # todo: generalize to new NEURON models specified by neuron_type
    bioneurons = [Bahl() for _ in range(ens.n_neurons)]
    # todo: call user-defined function that introduces variance into specific
    # NEURON parameters in each bioneuron, to encourage heterogeneity
    neuron.init()

    model.sig[neurons]['voltage'] = Signal(
        np.zeros(ens.n_neurons),
        name='%s.voltage' % ens.label)
    op = SimBioneuron(neuron_type=neuron_type,
                       neurons=bioneurons,
                       output=model.sig[neurons]['out'],
                       voltage=model.sig[neurons]['voltage'],
                       states=[model.time])

    # Initialize encoders, gains, and biases according to some heuristics,
    # unless the user has specified them already.
    # Note: setting encoders/gains/biases in this way doesn't really
    # respect the high-level ordering of the nengo build process.
    # This can generate hard-to-track problems related to these attributes.
    # However, setting them like 'neurons' are set below may not be possible
    # because these attributes are used in more places in the build process.
    rng = np.random.RandomState(seed=ens.seed)
    if hasattr(ens, 'encoders') and ens.encoders is not None:
        if not isinstance(ens.encoders, np.ndarray):
            ens.encoders = nengo.dists.get_samples(ens.encoders, ens.n_neurons, ens.dimensions, rng)
    else:
        ens.encoders = gen_encoders(ens.n_neurons, ens.dimensions, ens.radius, rng)

    if hasattr(ens, 'gain') and ens.gain is not None:
        if not isinstance(ens.gain, np.ndarray):
            ens.gain = nengo.dists.get_samples(ens.gain, ens.n_neurons, 1, rng)[:,0]
    else:
        ens.gain = gen_gains(ens.n_neurons, ens.dimensions, ens.radius, rng)

    if hasattr(ens, 'bias') and ens.bias is not None:
        if not isinstance(ens.bias, np.ndarray):
            ens.bias = nengo.dists.get_samples(ens.bias, ens.n_neurons, 1, rng)[:,0]
    else:
        ens.bias = gen_bias(ens.n_neurons, ens.dimensions, ens.radius, rng, method=bias_method)

    # if (hasattr(ens, 'encoders')
    #         and ens.encoders is not None
    #         and not isinstance(ens.encoders, np.ndarray)):
    #     ens.encoders = nengo.dists.get_samples(ens.encoders, ens.n_neurons, ens.dimensions, rng)
    # else:
    #     ens.encoders = gen_encoders(
    #         ens.n_neurons,
    #         ens.dimensions,
    #         ens.radius,
    #         rng)
    # if (hasattr(ens, 'gain')
    #         and ens.gain is not None
    #         and not isinstance(ens.gain, np.ndarray)):
    #     ens.gain = nengo.dists.get_samples(ens.gain, ens.n_neurons, 1, rng)[:,0]
    # else:
    #     ens.gain = gen_gains(
    #         ens.n_neurons,
    #         ens.dimensions,
    #         ens.radius,
    #         rng)
    # if (hasattr(ens, 'bias')
    #         and ens.bias is not None
    #         and not isinstance(ens.bias, np.ndarray)):
    #     ens.bias = nengo.dists.get_samples(ens.bias, ens.n_neurons, 1, rng)[:,0]
    # else:
    #     ens.bias = gen_biases(
    #         ens.n_neurons,
    #         ens.dimensions,
    #         ens.radius,
    #         rng,
    #         method='decode')

    model.add_op(op)

    assert neurons not in model.params
    model.params[neurons] = bioneurons

    # Build a bias-emulating connection
    build_bias(model, ens, ens.bias, method=bias_method)
Example #33
def build_connection(model, conn):
    """
    Method to build connections into bioensembles.
    Calculates the optimal decoders for this connection as though
    the presynaptic ensemble was connecting to a hypothetical LIF ensemble.
    These decoders are used to calculate the synaptic weights
    in init_connection().
    Adds a transmit_spike operator for this connection to the model
    """

    conn_pre = deref_objview(conn.pre)
    conn_post = deref_objview(conn.post)
    rng = np.random.RandomState(model.seeds[conn])

    if isinstance(conn_pre, nengo.Ensemble) and \
            isinstance(conn_pre.neuron_type, BahlNeuron):
        # todo: generalize to custom online solvers
        if not isinstance(conn.solver, NoSolver) and conn.syn_weights is None:
            raise BuildError("Connections from bioneurons must provide a NoSolver or syn_weights"
                            " (got %s from %s to %s)" % (conn.solver, conn_pre, conn_post))

    if (isinstance(conn_post, nengo.Ensemble) and \
            isinstance(conn_post.neuron_type, BahlNeuron)):
        if not isinstance(conn_pre, nengo.Ensemble) or \
                'spikes' not in conn_pre.neuron_type.probeable:
            raise BuildError("May only connect spiking neurons (pre=%s) to "
                             "bioneurons (post=%s)" % (conn_pre, conn_post))
        """
        Given a particular connection, labeled by conn.pre,
        Grab the initial decoders
        Generate locations for synapses, then either
        (a) Create synapses with weight equal to
            w_ij=np.dot(d_i,alpha_j*e_j)/n_syn, where
                - d_i is the initial decoder,
                - e_j is the single bioneuron encoder
                - alpha_j is the single bioneuron gain
                - n_syn normalizes total input current for multiple-synapse conns
        (b) Load synaptic weights from a prespecified matrix

        Add synapses with those weights to bioneuron.synapses,
        store this initial synaptic weight matrix in conn.weights = conn.syn_weights
        Finally call neuron.init().
        """
        if conn.syn_locs is None:
            conn.syn_locs = get_synaptic_locations(
                rng,
                conn_pre.n_neurons,
                conn_post.n_neurons,
                conn.n_syn)
        if conn.syn_weights is None:
            use_syn_weights = False
            conn.syn_weights = np.zeros((
                conn_post.n_neurons,
                conn_pre.n_neurons,
                conn.syn_locs.shape[2]))
        else:
            use_syn_weights = True
            conn.syn_weights = copy.copy(conn.syn_weights)
        if conn.learning_node is not None and hasattr(conn.learning_node, 'syn_encoders'):
            # initialize synaptic weights for EncoderNode learned connection
            use_syn_weights = True
            conn.syn_weights = np.array(conn.learning_node.update_weights())

        # Grab decoders from the specified solver (usually nengo.solvers.NoSolver(d))
        transform = full_transform(conn, slice_pre=False)
        eval_points, decoders, solver_info = build_decoders(
                model, conn, rng, transform)

        # normalize the area under the ExpSyn curve to compensate for effect of tau
        times = np.arange(0, 1.0, 0.001)
        k_norm = np.linalg.norm(np.exp((-times/conn.tau_list[0])),1)

        # todo: synaptic gains and encoders
        # print conn, conn_post.gain, conn.post.encoders
        neurons = model.params[conn_post.neurons]  # set in build_bioneurons
        for j, bahl in enumerate(neurons):
            assert isinstance(bahl, Bahl)
            loc = conn.syn_locs[j]
            encoder = conn_post.encoders[j]
            gain = conn_post.gain[j]
            bahl.synapses[conn] = np.empty(
                (loc.shape[0], loc.shape[1]), dtype=object)
            for pre in range(loc.shape[0]):
                for syn in range(loc.shape[1]):
                    if conn.sec == 'apical':
                        section = bahl.cell.apical(loc[pre, syn])
                    elif conn.sec == 'tuft':
                        section = bahl.cell.tuft(loc[pre, syn])
                    elif conn.sec == 'basal':
                        section = bahl.cell.basal(loc[pre, syn])
                    if use_syn_weights:  # syn_weights already specified
                        w_ij = conn.syn_weights[j, pre, syn]
                    else:  # syn_weights should be set by dec_pre and bio encoders/gain
                        w_ij = np.dot(decoders.T[pre], gain * encoder)
                        w_ij = w_ij / conn.n_syn / k_norm
                        conn.syn_weights[j, pre, syn] = w_ij
                    if conn.syn_type == 'ExpSyn':
                        tau = conn.tau_list[0]
                        synapse = ExpSyn(section, w_ij, tau, loc[pre, syn])
                    elif conn.syn_type == 'Exp2Syn':
                        assert len(conn.tau_list) == 2, 'Exp2Syn requires tau_rise, tau_fall'
                        tau1 = conn.tau_list[0]
                        tau2 = conn.tau_list[1]
                        synapse = Exp2Syn(section, w_ij, tau1, tau2, loc[pre, syn])
                    bahl.synapses[conn][pre][syn] = synapse
        neuron.init()

        model.add_op(TransmitSpikes(
            conn, conn_post, conn.learning_node, neurons,
            model.sig[conn_pre]['out'], states=[model.time]))
        model.params[conn] = BuiltConnection(eval_points=eval_points,
                                             solver_info=solver_info,
                                             transform=transform,
                                             weights=conn.syn_weights)

    else:  # normal connection
        return nengo.builder.connection.build_connection(model, conn)
Example #34
def simple_geo(props=None, retsoma=False):
  # simulate stimulations, returns time, dend2_volt +/- soma_volt
  P = set_props()
  if props is None:
    props=P
  else:
    for k in P.keys():
      if k not in props:
        props[k] = P[k]
    for k in props.keys():
      if k not in P.keys():
        print("Don't know what the f**k %s is. Options are: " %k)
        print(list(P.keys()))
    
  # Creating the morphology
  # soma (compartment: neuron.h.Section() )
  soma = init_section(100, props['Ra'], 80, 10)
  # dendrite0
  dend_0 = init_section(200, props['Ra'], bound0)
  # dendrite1, with taper
  dend_1 = init_section(200, props['Ra'], bound0)
  diams = np.linspace(bound0, bound1, dend_1.nseg)
  rad = -1
  for seg in dend_1:
    rad = rad+1
    seg.diam = diams[rad]
  # dendrite2
  dend_2 = init_section(200, props['Ra'], bound1)

  dend_0.connect(soma, 1, 0) # connect soma(1) with dend_0(0)
  dend_1.connect(dend_0, 1, 0) # connect dend_0(1) with dend_1(0)
  dend_2.connect(dend_1, 1, 0) # connect dend_1(1) with dend_2(0)

  # Implementing a current clamp electrode
  # Locate the electrode at the center of the soma
  stim = neuron.h.IClamp(soma(0.5))
  # Setting recording paradigm
  stim.delay = props['stim_delay']
  stim.amp = props['stim_amp']
  stim.dur = props['stim_dur']
  
  # Setting passive parameters
  for sec in neuron.h.allsec():
    # Do with the present `sec`
    sec.insert('pas')
    sec.Ra = props['Ra']
    # Do for each segment within `sec`:
    for seg in sec:
      # Do with the segment `seg`:
      seg.pas.g = 0.01
      seg.pas.e = -50

  # Record Time from NEURON (neuron.h._ref_t)
  rec_t = neuron.h.Vector()
  rec_t.record(neuron.h._ref_t)
  # Record Voltage from the center of the soma and the end of dend_2
  rec_v = neuron.h.Vector()
  rec_v.record(soma(0.5)._ref_v)
  rec_2 = neuron.h.Vector()
  rec_2.record(dend_2(1)._ref_v)

  neuron.h.finitialize(-50)
  neuron.init()
  neuron.run(props['run_time'])
  
  if retsoma==True:
    return list(rec_t), list(rec_2), list(rec_v)
  else:
    return list(rec_t), list(rec_2)
Example #35
    def initAndRun(self, reset_V=-65):

        nrn.h.finitialize(reset_V)
        nrn.init()
        nrn.run(self.tStop)