def record_tau(self, neuronNr, iLeak, v_rest=None):
    print 'now at neuron number', neuronNr

    # linear dependency of simulation time on 1/iLeak
    duration = 5.0 * 1000.0 / float(iLeak)
    duration = np.min([duration, 50000.0])

    # initialize pyNN
    mappingOffset = neuronNr
    if self.chipVersion == 4:
        mappingOffset = neuronNr - 192
    p.setup(useUsbAdc=True, calibTauMem=False, calibVthresh=False,
            calibSynDrivers=False, calibIcb=False,
            mappingOffset=mappingOffset, workStationName=self.workstation)

    # set g_leak such that iLeak is the desired value
    iLeak_base = default.iLeak_base
    g_leak = float(iLeak) / iLeak_base

    # determine tau_mem, v_rest and v_reset all at once
    trials = 0
    params = deepcopy(self.neuronParams)
    params['g_leak'] = g_leak
    if v_rest is not None:
        params['v_rest'] = v_rest
    neuron = p.Population(1, p.IF_facets_hardware1, params)
    neuron.record()
    p.record_v(neuron[0], '')

    crossedTargetRate = False
    while params['v_rest'] < self.maxVRest:
        print 'now at trial', trials, '/ v_rest =', params['v_rest']
        p.run(duration)
        trace = p.membraneOutput
        dig_spikes = neuron.getSpikes()[:, 1]
        memtime = p.timeMembraneOutput
        timestep = memtime[1] - memtime[0]

        # if neuron spikes with too low rate, try again with higher resting potential
        if len(dig_spikes) < self.targetSpikes:
            params['v_rest'] = params['v_rest'] + self.vRestStep
            neuron.set(params)
            print 'Neuron spiked with too low rate, trying again with parameters', params
            trials += 1
        else:  # proper spiking
            crossedTargetRate = True
            break

    if not crossedTargetRate:
        utils.report('Could not find parameters for which neuron {0} spikes. Will return nan tau_mem'.format(neuronNr), self.reportFile)
        return np.concatenate(([iLeak], [np.nan] * 6, [params['v_rest']]))

    p.end()

    # determine tau_mem from measurements
    result = utils.fit_tau_mem(trace, memtime, dig_spikes, timestep=timestep, reportFile=self.reportFile)
    if result is None:  # fit failed
        utils.report('Fit of membrane time constant for neuron {0} failed (iLeak = {1})'.format(neuronNr, iLeak), self.reportFile)
        return np.concatenate(([iLeak], [np.nan] * 6, [params['v_rest']]))
    return np.concatenate(([iLeak], result, [params['v_rest']]))
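# A minimal usage sketch, not part of the original calibration class: sweeping
# record_tau over several iLeak values and stacking the returned rows (each row is
# [iLeak, 6 fit values, v_rest]). The method name sweep_iLeak and the example iLeak
# values are assumptions for illustration only.
def sweep_iLeak(self, neuronNr, iLeakValues=(0.2, 0.5, 1.0, 2.0)):
    results = []
    for iLeak in iLeakValues:
        # rows containing np.nan indicate a failed fit for that iLeak
        results.append(self.record_tau(neuronNr, iLeak))
    return np.vstack(results)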
def recordTauRef(self, neuronNr, icb):
    # necessary hardware setup
    p.setup(useUsbAdc=True, mappingOffset=neuronNr - 192, calibTauMem=True,
            calibVthresh=False, calibSynDrivers=False, calibIcb=False,
            workStationName=self.workstation)
    p.hardware.hwa.setIcb(icb)

    # observed neuron
    neuron = p.Population(1, p.IF_facets_hardware1, self.neuronParams)
    # stimulating population
    input = p.Population(self.inputParameters['numInputs'], p.SpikeSourceArray,
                         self.inputParameters['inputSpikes'])
    # connect input and neuron
    conn = p.AllToAllConnector(allow_self_connections=False,
                               weights=self.inputParameters['weight'])
    proj = p.Projection(input, neuron, conn, synapse_dynamics=None, target='excitatory')

    # record spikes and membrane potential
    neuron.record()
    p.record_v(neuron[0], '')

    # run experiment
    p.run(self.duration)

    # evaluate results
    spikesDig = neuron.getSpikes()[:, 1]
    membrane = p.membraneOutput
    time = p.timeMembraneOutput

    # clean up
    p.end()

    # determine sampling bins
    timestep = time[1] - time[0]

    # detect analog spikes
    spikesAna, isiAna = utils.find_spikes(membrane, time, spikesDig, reportFile=self.reportFile)

    # determine refractory period from measurement of analog spikes
    tau_ref, tau_ref_err, doubles_spikes = utils.fit_tau_refrac(
        membrane, timestep, spikesAna, isiAna, noDigSpikes=len(spikesDig),
        reportFile=self.reportFile, debugPlot=self.debugPlot)

    return tau_ref, tau_ref_err, doubles_spikes, spikesDig
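# A minimal usage sketch, not part of the original class: repeating recordTauRef
# for a range of icb bias currents to map their effect on the measured refractory
# period. The method name sweep_icb and the icb values are illustrative assumptions.
def sweep_icb(self, neuronNr, icbValues=(0.02, 0.05, 0.1, 0.2)):
    tauRefList = []
    for icb in icbValues:
        tau_ref, tau_ref_err, double_spikes, spikesDig = self.recordTauRef(neuronNr, icb)
        tauRefList.append((icb, tau_ref, tau_ref_err))
    return tauRefList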
connInh = pynn.AllToAllConnector(weights=weightInh)
connExc_strong = pynn.FixedProbabilityConnector(p_connect=1.0, weights=weightExc * 2)

# 1st neuron is stimulated by background
pynn.Projection(stimExc, neuronA, connExc, target="excitatory")
pynn.Projection(stimInh, neuronA, connInh, target="inhibitory")

# 2nd neuron is stimulated by 1st neuron
pynn.Projection(neuronA, neuronB, connExc_strong, synapse_dynamics=None, target="excitatory")

# define which observables to record
# spike times
neuronA.record()
# membrane potential
pynn.record_v(neuronB[0], '')

# execute the experiment
pynn.run(runtime)

# evaluate results
spikes = neuronA.getSpikes()[:, 1]
membrane = pynn.membraneOutput
membraneTime = pynn.timeMembraneOutput

pynn.end()

####################################################################
# data visualization
####################################################################
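# A minimal plotting sketch for the visualization section above; this is an
# assumption, since the original plotting code is not shown here. It assumes
# matplotlib is available and uses the spikes, membrane and membraneTime arrays
# collected above. The output file name is illustrative.
import matplotlib.pyplot as plt

plt.figure()
# membrane potential of the 2nd neuron
plt.plot(membraneTime, membrane, label='membrane of neuron B')
# spike times of the 1st neuron as vertical markers
for spike in spikes:
    plt.axvline(spike, color='gray', alpha=0.5)
plt.xlabel('time (ms)')
plt.ylabel('membrane potential (mV)')
plt.legend()
plt.savefig('two_neurons.png')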
def calib(self):
    self.result['datetime'] = dt.datetime.now()
    self.result['temperature'] = 'TODO'
    self.result['person'] = pwd.getpwuid(os.getuid()).pw_name

    # one setup is necessary in order to determine spikey version
    pynn.setup(workStationName=self.workstation,
               calibOutputPins=False, calibNeuronMems=False, calibTauMem=False,
               calibSynDrivers=False, calibVthresh=False, calibBioDynrange=False)
    self.chipVersion = pynn.hardware.chipVersion()

    for block in range(2):
        if not self.chipVersion == 4 or block == 1:
            for pin in range(4):
                lower = -90.
                upper = -40.
                step = (upper - lower) / self.numVsteps
                for vrest in numpy.arange(lower, upper + step / 2., step):
                    pin_in = numpy.nan
                    pin_out = numpy.nan
                    for pinBlock in range(self.numPinBlocks):
                        neuron = block * 192 + pinBlock * 4 + pin
                        # necessary setup
                        mappingOffset = neuron
                        if self.chipVersion == 4:
                            mappingOffset = neuron % 192
                        pynn.setup(useUsbAdc=True, workStationName=self.workstation,
                                   mappingOffset=mappingOffset, rng_seeds=self.seeds, avoidSpikes=True,
                                   calibOutputPins=False, calibNeuronMems=False, calibTauMem=False,
                                   calibSynDrivers=False, calibVthresh=False)
                        self.neuronParams['v_rest'] = vrest

                        # set up network
                        n = pynn.create(pynn.IF_facets_hardware1, self.neuronParams, n=1)
                        pynn.record_v(n, '')
                        pynn.record(n, '')

                        # http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
                        traceSum = 0.0
                        traceSumSqr = 0.0

                        # execute the experiment in a loop
                        for i in range(self.numRuns):
                            pynn.run(self.duration, translateToBioVoltage=False)
                            if i == 0:
                                pin_in = pynn.hardware.hwa.vouts[neuron / 192, neuron % 2 + 2]
                            else:
                                assert pin_in == pynn.hardware.hwa.vouts[neuron / 192, neuron % 2 + 2], 'vout should not differ'
                            mem = pynn.membraneOutput
                            memMean = mem.mean()
                            traceSum += memMean
                            traceSumSqr += numpy.power(memMean, 2)
                            noSpikes = len(pynn.spikeOutput[1])
                            if not float(noSpikes) / self.duration * 1e3 == 0:
                                self.noSpikesTotal += noSpikes
                                print 'there are', noSpikes, 'spikes on the membrane (most likely / hopefully ghost spikes)'
                                assert mem.std() < self.limMemStd, 'digital spikes and spikes on the membrane found!'
                        pin_out = traceSum / self.numRuns
                        pin_out_std = (traceSumSqr - (numpy.power(traceSum, 2) / self.numRuns)) / (self.numRuns - 1)
                        pynn.end()

                        print 'For neuron', neuron, 'the written voltage', pin_in, 'appeared on the scope as', pin_out, '/2'

                        # save raw data
                        newData = numpy.vstack((neuron, pin_in, pin_out, numpy.sqrt(pin_out_std))).T
                        if self.rawData is None:
                            self.rawData = newData
                        else:
                            self.rawData = numpy.vstack((self.rawData, newData))

    def filter_and_fit(dataset):
        # filter data
        dataset = numpy.atleast_2d(dataset)
        dataToFit = numpy.atleast_2d(dataset[dataset[:, 1] >= self.voltageLimLow])
        dataToFit = numpy.atleast_2d(dataToFit[dataToFit[:, 1] <= self.voltageLimHigh])
        noPins = len(numpy.unique(numpy.array(dataset[:, 0] / 192, numpy.int) * 4 + dataset[:, 0] % 4))
        assert (len(dataset) - len(dataToFit)) % noPins == 0, 'discarding data failed'
        print 'discarded', (len(dataset) - len(dataToFit)) / noPins, 'data points'
        # fit polynomial
        return numpy.polyfit(dataToFit[:, 2], dataToFit[:, 1], self.polyDegree)

    for block in range(2):
        if self.chipVersion == 4 and block == 0:
            continue
        for pin in range(4):
            # data for output pin calibration
            dataOnePin = self.rawData[numpy.array(self.rawData[:, 0] / 192, numpy.int) * 4 + self.rawData[:, 0] % 4 == block * 4 + pin]

            # calculate mean over neurons with same pin
            vouts = numpy.unique(dataOnePin[:, 1])
            mean = []
            std = []
            for vout in vouts:
                mean.append(numpy.mean(dataOnePin[dataOnePin[:, 1] == vout][:, 2]))
                std.append(numpy.std(dataOnePin[dataOnePin[:, 1] == vout][:, 2]))
            dataOnePinMean = numpy.vstack((numpy.zeros_like(vouts), vouts, mean, std)).T

            self.result['polyFitOutputPins']['pin' + str(block * 4 + pin)] = filter_and_fit(dataOnePinMean)

            for pinBlock in range(self.numPinBlocks):
                neuron = block * 192 + pinBlock * 4 + pin
                # data for membrane calibration of single neurons
                dataOneNeuron = self.rawData[self.rawData[:, 0] == neuron]
                self.result['polyFitNeuronMems']['neuron' + str(neuron).zfill(3)] = filter_and_fit(dataOneNeuron)

    print 'total number of spikes', self.noSpikesTotal, 'in', len(numpy.unique(numpy.array(self.rawData[:, 0] / 192, numpy.int) * 4 + self.rawData[:, 0] % 4)) * (self.numVsteps + 1) * self.numPinBlocks * self.numRuns, 'runs'

    return self.result, self.rawData
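# A minimal sketch, not part of the original script, of how the fitted polynomials
# returned by calib() might be applied: numpy.polyfit above maps measured membrane
# voltages (column 2) to written vout values (column 1), so numpy.polyval with those
# coefficients translates a measured voltage back into the value to write. The
# function name and its arguments are assumptions for illustration.
def written_from_measured(result, neuron, measuredVoltage):
    coeffs = result['polyFitNeuronMems']['neuron' + str(neuron).zfill(3)]
    return numpy.polyval(coeffs, measuredVoltage)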
# connect stimulus
pynn.Projection(stimExc, popCollector['exc'][0],
                pynn.FixedProbabilityConnector(p_connect=probExcExc, weights=weightStimExcExc),
                target='excitatory')
pynn.Projection(stimExc, popCollector['inh'][0],
                pynn.FixedProbabilityConnector(p_connect=probExcInh, weights=weightStimExcInh),
                target='excitatory')

# connect synfire chain populations
for popIndex in range(noPops):
    #if popIndex < noPops - 1: # open chain
    pynn.Projection(popCollector['exc'][popIndex], popCollector['exc'][(popIndex + 1) % noPops],
                    pynn.FixedProbabilityConnector(p_connect=probExcExc, weights=weightExcExc),
                    target='excitatory')
    pynn.Projection(popCollector['exc'][popIndex], popCollector['inh'][(popIndex + 1) % noPops],
                    pynn.FixedProbabilityConnector(p_connect=probExcInh, weights=weightExcInh),
                    target='excitatory')
    pynn.Projection(popCollector['inh'][popIndex], popCollector['exc'][popIndex],
                    pynn.FixedProbabilityConnector(p_connect=probInhExc, weights=weightInhExc),
                    target='inhibitory')

# record from first neuron of first excitatory population of chain
pynn.record_v(popCollector['exc'][0][0], '')

# hack to elongate refractory period of all neurons
# will be configurable via neuron parameters, soon
pynn.hardware.hwa.setIcb(0.02)

pynn.run(runtime)

# collect all spikes in one array
spikeCollector = np.array([]).reshape(0, 2)
for synType in ['exc', 'inh']:
    for popIndex in range(noPops):
        spikeCollector = np.vstack((spikeCollector, popCollector[synType][popIndex].getSpikes()))

# get membrane
membrane = pynn.membraneOutput
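# A minimal raster-plot sketch; an assumption, not part of the original script.
# The rows collected in spikeCollector hold [neuron id, spike time], matching the
# [:,1] indexing used elsewhere in these scripts; matplotlib and the output file
# name are assumed for illustration.
import matplotlib.pyplot as plt

plt.figure()
plt.plot(spikeCollector[:, 1], spikeCollector[:, 0], '.', markersize=2)
plt.xlabel('time (ms)')
plt.ylabel('neuron id')
plt.savefig('synfire_raster.png')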