def test_noSpikesBug():
    import numpy as np
    import pyNN.hardware.spikey as pynn

    duration = 10 * 1000.0  # ms
    neuronParams = {
        'v_reset': -80.0,   # mV
        'e_rev_I': -80.0,   # mV
        'v_rest': -45.0,    # mV / rest above threshold
        'v_thresh': -55.0,  # mV
        'g_leak': 20.0,     # nS / without Scherzer calib approx. tau_m = 2ms
    }

    noTrials = 100
    failCount = 0
    for i in range(noTrials):
        pynn.setup()
        neuron = pynn.Population(1, pynn.IF_facets_hardware1, neuronParams)
        neuron.record()
        pynn.run(duration)
        spikes = neuron.getSpikes()[:, 1]
        pynn.end()  # comment this out and everything is fine
        if len(spikes) == 0:
            failCount += 1

    assert failCount == 0, str(
        float(failCount) / noTrials * 1e2) + ' % of runs did not have spikes'
def testRegularMaxPacked():
    '''Maximum rate with packing: Each clock cycle a full (filled with 3 spikes) spike packet.'''
    import numpy as np
    import pyNN.hardware.spikey as pynn

    duration = 10000.0  # ms
    h = 1e3 / 5000.0 / 2.0  # 10kHz for each of 3 sources = 30kHz
    spikeTimes = np.arange(0, duration + h / 2.0, h)

    pynn.setup()
    stim = pynn.Population(256, pynn.SpikeSourceArray)
    stim[0].set_parameters(spike_times=spikeTimes)
    stim[63].set_parameters(spike_times=spikeTimes)
    stim[127].set_parameters(spike_times=spikeTimes)
    neuron = pynn.Population(1, pynn.IF_facets_hardware1)
    pynn.Projection(stim, neuron, method=pynn.AllToAllConnector(weights=0),
                    target='inhibitory')
    neuron.record()

    pynn.run(duration)
    print 'no out spikes:', len(neuron.getSpikes())
    lost, sent = pynn.getInputSpikes()
    print 'no in spikes (lost, sent)', lost, sent

    assert lost == 0, 'there should not be any spikes lost!'
    pynn.end()
def test():
    '''Mapping of bio index to hardware index should work for networks where not all neurons are recorded.'''
    pynn.setup()

    if mappingOffset > 0:
        dummy = pynn.Population(mappingOffset, pynn.IF_facets_hardware1)

    neuronList = []
    for i in range(noNeurons):
        neuronList.append(pynn.Population(1, pynn.IF_facets_hardware1))
        neuronList[-1].record()
    dummy = pynn.Population(1, pynn.IF_facets_hardware1)

    stim = pynn.Population(1, pynn.SpikeSourcePoisson)
    for neuron in neuronList:
        pynn.Projection(stim, neuron,
                        pynn.AllToAllConnector(weights=pynn.minExcWeight()))

    pynn.run(1000.0)
    pynn.end()

    f = open('spikeyconfig.out')
    for line in f:
        for i in range(mappingOffset + 2 * noNeurons):
            if line.find('w ' + str(192 + i)) >= 0:
                weight = int(line.split(' ')[256 + 2 - 1])
                print 192 + i, weight
                assert (weight == shouldPatternWeights[i]
                        ), 'results do not fit expectation'
    f.close()
def maxSpikesIn(runtime):
    '''Maximum number of spikes that can be sent:
    Should be limited by memory size on FPGA board
    (256MB => 256x1024x1024x8/32x3 approx. 200e6 spikes).'''
    rate = 10.0
    weight = 1.0
    poissonParam = {'start': 0, 'duration': runtime, 'rate': rate}

    pynn.setup()
    stim = pynn.Population(256, pynn.SpikeSourcePoisson, poissonParam)
    neuron = pynn.Population(192, pynn.IF_facets_hardware1)
    prj = pynn.Projection(stim, neuron,
                          pynn.AllToAllConnector(weights=pynn.minInhWeight() * weight),
                          target='inhibitory')
    neuron.record()

    pynn.run(runtime)
    spikes = neuron.getSpikes()
    lost, sent = pynn.getInputSpikes()
    print 'spikes in / out', sent, len(spikes)
    pynn.end()
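
# Back-of-the-envelope check of the capacity estimate quoted in the docstring
# above (sketch only; it assumes three spikes are packed into each 32-bit
# memory word, as the formula 256x1024x1024x8/32x3 suggests).
memoryBits = 256 * 1024 * 1024 * 8    # 256 MB playback memory on the FPGA board, in bits
maxInputSpikes = memoryBits / 32 * 3  # approx. 200e6 spikes
print 'approx. maximum number of input spikes:', maxInputSpikes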
def test_spikey5_allneurons():
    '''
    Tests mapping and firing of all 384 neurons.
    '''
    runtime = 1000.0
    stimRate = 10.0
    weight = 7

    pynn.setup()

    neurons = pynn.Population(384, pynn.IF_facets_hardware1)
    stim = pynn.Population(10, pynn.SpikeSourcePoisson, {
        'start': 0, 'duration': runtime, 'rate': stimRate})
    prj = pynn.Projection(
        stim, neurons,
        method=pynn.AllToAllConnector(weights=pynn.minExcWeight() * weight),
        target='excitatory')
    neurons.record()  # record spikes of all neurons (needed before getSpikes)

    pynn.run(runtime)
    spikes = neurons.getSpikes()
    print 'spikes from', len(np.unique(spikes[:, 0])), 'different neurons'
    # TODO: check for spikes from all neurons
    pynn.end()
def run(mappingOffset):
    """
    Measures firing rate of one neuron (determined by mappingOffset) in
    dependence on the value of g_leak. If a linear fit to these firing rates
    does not show a significantly large slope, g_leak is assumed to be not set
    correctly.
    """
    pynn.setup(mappingOffset=mappingOffset, calibTauMem=False,
               calibSynDrivers=False, calibVthresh=False)

    # set v_rest over v_reset to get neuron firing
    neuron = pynn.Population(1, pynn.IF_facets_hardware1, {
        'v_rest': pynn.IF_facets_hardware1.default_parameters['v_thresh'] + 10.0})
    neuron.record()

    rateList = []
    for gLeak in gLeakRange:
        neuron.set({'g_leak': gLeak / default.iLeak_base})
        pynn.hardware.hwa._neuronsChanged = True
        pynn.run(runtime)
        rateList.append(
            [gLeak, float(len(neuron.getSpikes())) / runtime * 1e3])
    pynn.end()

    rateList = np.array(rateList)
    pol = np.polyfit(rateList[:, 0], rateList[:, 1], 1)  # linear fit
    print 'fitted polynom:', pol
    assert pol[0] > slopeMin, 'rate does not change with g_leak'
def getMemLoop():
    result = []

    pynn.setup(useUsbAdc=True)
    neuron = pynn.Population(noNeurons, pynn.IF_facets_hardware1)
    for j in range(noNeurons):
        if j % 2 == 0:
            neuron[j].set_parameters(v_rest=vRestEven)
        else:
            neuron[j].set_parameters(v_rest=vRestOdd)
    neuron.record()

    for i in range(noNeurons):
        pynn.record_v(neuron[i], '')
        pynn.run(runtime)
        mem = pynn.membraneOutput
        spikes = neuron.getSpikes()

        shutil.copy(spikeyconfigFilename, spikeyconfigFilename + extListNo[i])

        self.assertTrue(
            (float(len(spikes)) / runtime * 1e3) <= limFreq,
            'there should not be any (too much) spikes')
        result.append([mem.mean(), mem.std()])
    pynn.end()

    return result
def test_regular():
    """Maximum rate without packing: Every second clock cycle a minimally loaded spike packet (filled with 1 spike)."""
    import numpy as np
    import pyNN.hardware.spikey as pynn
    import time

    np.random.seed(int(time.time()))
    lineDriverNo = np.random.random_integers(0, 255)
    print 'Using line driver number', lineDriverNo

    duration = 1000.0  # ms
    h = 1e3 / 5000.0  # 0.2 ms

    pynn.setup()
    stim = pynn.Population(256, pynn.SpikeSourceArray)
    stim[lineDriverNo].set_parameters(
        spike_times=np.arange(0, duration + h / 2.0, h))
    neuron = pynn.Population(1, pynn.IF_facets_hardware1)
    pynn.Projection(stim, neuron, method=pynn.AllToAllConnector(weights=0),
                    target='inhibitory')
    neuron.record()

    pynn.run(duration)
    lost, sent = pynn.getInputSpikes()
    print 'Number of input spikes (lost, sent)', lost, sent

    assert lost == 0, 'There should not be any spikes lost!'
    pynn.end()
def test_poisson():
    """Test with Poisson source."""
    import pyNN.hardware.spikey as pynn

    duration = 1000.0  # ms
    rate = 5000.0  # 1/s
    poissonParam = {'start': 0, 'duration': duration, 'rate': rate}
    limLost = 1.0  # %

    pynn.setup()
    stim = pynn.Population(1, pynn.SpikeSourcePoisson, poissonParam)
    neuron = pynn.Population(1, pynn.IF_facets_hardware1)
    pynn.Projection(stim, neuron, method=pynn.AllToAllConnector(weights=0),
                    target='inhibitory')
    neuron.record()

    pynn.run(duration)
    lost, sent = pynn.getInputSpikes()
    print 'Number of input spikes (lost, sent, %lost)', lost, sent, float(lost) / sent * 1e2

    assert float(lost) / sent * 1e2 < limLost, 'Too many spikes lost!'
    pynn.end()
def test_regular_packed():
    """Maximum rate with packing: Each clock cycle a full (filled with 3 spikes) spike packet."""
    import numpy as np
    import pyNN.hardware.spikey as pynn

    duration = 1000.0  # ms
    h = 1e3 / 5000.0 / 2.0  # 0.1 ms
    spikeTimes = np.arange(0, duration + h / 2.0, h)

    pynn.setup()
    stim = pynn.Population(256, pynn.SpikeSourceArray)
    # spikes have to be distributed over blocks of line drivers for efficient
    # packing
    stim[0].set_parameters(spike_times=spikeTimes)
    stim[64].set_parameters(spike_times=spikeTimes)
    stim[128].set_parameters(spike_times=spikeTimes)
    neuron = pynn.Population(1, pynn.IF_facets_hardware1)
    pynn.Projection(stim, neuron, method=pynn.AllToAllConnector(weights=0),
                    target='inhibitory')
    neuron.record()

    pynn.run(duration)
    lost, sent = pynn.getInputSpikes()
    print 'Number of input spikes (lost, sent)', lost, sent

    assert lost == 0, 'There should not be any spikes lost!'
    pynn.end()
def getMem(self, voltageRest, mappingOffset, calibOutputPins, calibNeuronMems):
    import pyNN.hardware.spikey as pynn
    pynn.setup(useUsbAdc=True, avoidSpikes=True, mappingOffset=mappingOffset,
               calibTauMem=False, calibOutputPins=calibOutputPins,
               calibNeuronMems=calibNeuronMems)

    neuron = pynn.Population(1, pynn.IF_facets_hardware1, self.neuronParams)
    #neuronDummy = pynn.Population(1, pynn.IF_facets_hardware1, self.neuronParams)
    neuron.set({'v_rest': voltageRest})
    #neuronDummy.set({'v_rest': self.voltageRange[0]})
    neuron.record()
    pynn.record_v(neuron[0], '')

    pynn.run(self.runtime)
    mem = pynn.membraneOutput
    spikes = neuron.getSpikes()
    pynn.end()

    self.assertTrue(
        (float(len(spikes)) / self.runtime * 1e3) <= self.limFreq,
        'there should not be any (too much) spikes')
    return mem.mean()
def withMappingOffset(mappingOffset, vrest):
    pynn.setup(mappingOffset=mappingOffset)
    pynn.Population(1, pynn.IF_facets_hardware1, vrest)
    pynn.run(1000.0)
    vout = copy.deepcopy(pynn.hardware.hwa.vouts[1, 2:4])
    pynn.end()
    return vout
def maxRuntime(runtime):
    '''Maximum runtime: Limited by wrap-around of the counter (after approx. 6600s).
    Can be extended to arbitrarily long runtimes by handling the wrap-around:
    subtract/add an offset to in/out spike times for each wrap-around.'''
    rate = 1.0
    weight = 1.0
    poissonParam = {'start': 0, 'duration': runtime, 'rate': rate}

    pynn.setup()
    stim = pynn.Population(1, pynn.SpikeSourcePoisson, poissonParam)
    neuron = pynn.Population(1, pynn.IF_facets_hardware1)
    prj = pynn.Projection(stim, neuron,
                          pynn.AllToAllConnector(weights=pynn.minInhWeight() * weight),
                          target='inhibitory')
    neuron.record()

    pynn.run(runtime)
    spikes = neuron.getSpikes()
    lost, sent = pynn.getInputSpikes()
    print 'spikes in / out', sent, len(spikes)
    pynn.end()
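
# Sketch only: the unwrapping idea mentioned in the docstring above.
# wrapPeriod (approx. 6600 s, given here in ms) and rawSpikeTimes are
# illustrative assumptions; the test itself stays below one wrap-around.
import numpy as np

def unwrapSpikeTimes(rawSpikeTimes, wrapPeriod=6600.0 * 1e3):
    rawSpikeTimes = np.asarray(rawSpikeTimes)
    # each backwards jump in the raw times marks one counter wrap-around
    wraps = np.concatenate(([0], np.cumsum(np.diff(rawSpikeTimes) < 0)))
    return rawSpikeTimes + wraps * wrapPeriod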
def maxSpikesOut(runtime):
    '''Maximum number of spikes that can be received:
    Should be limited by memory size on FPGA board
    (128MB approx. 100e6 spikes, other half for ADC).'''
    neuronParams = {
        'v_reset': -80.0,   # mV
        'e_rev_I': -80.0,   # mV
        'v_rest': -45.0,    # mV / rest above threshold
        'v_thresh': -55.0,  # mV
        'g_leak': 20.0,     # nS / without Scherzer calib approx. tau_m = 2ms
    }

    pynn.setup()
    neuron = pynn.Population(192, pynn.IF_facets_hardware1, neuronParams)
    neuron.record()

    pynn.run(runtime)
    spikes = neuron.getSpikes()[:, 1]
    lost, sent = pynn.getInputSpikes()
    print 'spikes in / out', sent, len(spikes)
    pynn.end()
def run(noNeurons):
    runtime = 1000.0
    import numpy as np
    import pyNN.hardware.spikey as pynn

    pynn.setup()
    neurons = pynn.Population(noNeurons, pynn.IF_facets_hardware1)
    neurons.record()
    stim = pynn.Population(10, pynn.SpikeSourcePoisson, {
        'rate': 20.0, 'duration': runtime})
    prj = pynn.Projection(stim, neurons, pynn.AllToAllConnector())
    prj.setWeights(pynn.maxExcWeight())

    pynn.run(runtime)
    spikes = neurons.getSpikes([])
    # for neuron in np.unique(spikes[:,0]):
    #     print 'neuron', int(neuron), 'has', len(spikes[spikes[:,0] == neuron]), 'spikes'
    noSpikes = len(spikes)
    lost, sent = pynn.getInputSpikes()
    pynn.end()

    print 'no neurons / spikes in / lost / out:', noNeurons + 1, sent, lost, noSpikes
    return noSpikes
def run(lowThreshold):
    runtime = 1000.0

    pynn.setup()

    # set STDP params for low threshold -> fails when vcthigh - vctlow < 0.04
    if lowThreshold:
        pynn.hardware.hwa.setSTDPParams(0.0, default.tpcsec, default.tpcorperiod,
                                        1.0, 1.0, 1.0, 0.98, 2.5)
    else:
        pynn.hardware.hwa.setSTDPParams(0.0, default.tpcsec, default.tpcorperiod,
                                        1.0, 1.0, 1.0, 0.85, 2.5)

    neuron = pynn.Population(1, pynn.IF_facets_hardware1)
    spikeArray = pynn.Population(1, pynn.SpikeSourceArray)

    stdp_model = pynn.STDPMechanism(
        timing_dependence=pynn.SpikePairRule(),
        weight_dependence=pynn.AdditiveWeightDependence())
    prj = pynn.Projection(
        spikeArray, neuron,
        method=pynn.AllToAllConnector(weights=pynn.minExcWeight() * 0),
        target='excitatory',
        synapse_dynamics=pynn.SynapseDynamics(slow=stdp_model))

    pynn.run(runtime)
    pynn.end()
def record_tau(self, neuronNr, iLeak, v_rest=None):
    print 'now at neuron number', neuronNr

    # linear dependency of simulation time on 1/iLeak
    duration = 5.0 * 1000.0 / float(iLeak)
    duration = np.min([duration, 50000.0])

    # initialize pyNN
    mappingOffset = neuronNr
    if self.chipVersion == 4:
        mappingOffset = neuronNr - 192
    p.setup(useUsbAdc=True, calibTauMem=False, calibVthresh=False,
            calibSynDrivers=False, calibIcb=False,
            mappingOffset=mappingOffset, workStationName=self.workstation)

    # set g_leak such that iLeak is the desired value
    iLeak_base = default.iLeak_base
    g_leak = float(iLeak) / iLeak_base

    # determine tau_mem, v_rest and v_reset all at once
    trials = 0
    params = deepcopy(self.neuronParams)
    params['g_leak'] = g_leak
    if v_rest != None:
        params['v_rest'] = v_rest
    neuron = p.Population(1, p.IF_facets_hardware1, params)
    neuron.record()
    p.record_v(neuron[0], '')

    crossedTargetRate = False
    while params['v_rest'] < self.maxVRest:
        print 'now at trial', trials, '/ v_rest =', params['v_rest']
        p.run(duration)
        trace = p.membraneOutput
        dig_spikes = neuron.getSpikes()[:, 1]
        memtime = p.timeMembraneOutput
        timestep = memtime[1] - memtime[0]

        # if neuron spikes with too low rate, try again with higher resting potential
        if len(dig_spikes) < self.targetSpikes:
            params['v_rest'] = params['v_rest'] + self.vRestStep
            neuron.set(params)
            print 'Neuron spiked with too low rate, trying again with parameters', params
            trials += 1
        else:  # proper spiking
            crossedTargetRate = True
            break

    if not crossedTargetRate:
        utils.report('Could not find parameters for which neuron {0} spikes. Will return nan tau_mem'.format(neuronNr), self.reportFile)
        return np.concatenate(([iLeak], [np.nan] * 6, [params['v_rest']]))

    p.end()

    # determine tau_mem from measurements
    result = utils.fit_tau_mem(trace, memtime, dig_spikes, timestep=timestep,
                               reportFile=self.reportFile)
    if result == None:  # fit failed
        utils.report('Fit of membrane time constant for neuron {0} failed (iLeak = {1})'.format(neuronNr, iLeak), self.reportFile)
        return np.concatenate(([iLeak], [np.nan] * 6, [params['v_rest']]))

    return np.concatenate(([iLeak], result, [params['v_rest']]))
def withDummyNeurons(mappingOffset, vrest):
    pynn.setup()
    if mappingOffset > 0:
        pynn.Population(mappingOffset, pynn.IF_facets_hardware1)
    pynn.Population(1, pynn.IF_facets_hardware1, vrest)
    pynn.run(1000.0)
    vout = copy.deepcopy(pynn.hardware.hwa.vouts[1, 2:4])
    pynn.end()
    return vout
def test_empty_exp():
    """
    Initialize hardware and create one neuron.
    """
    pynn.setup()
    pynn.Population(1, pynn.IF_facets_hardware1)
    pynn.run(1000.0)
    pynn.end()
def runClosed():
    global neurons

    rateOutList = []
    for rate in rateRange:
        pynn.setup()
        build(rate)
        pynn.run(runtime)
        rateOutList.append(
            len(neurons.getSpikes()) / runtime * 1e3 / numNeurons)
        pynn.end()
    return rateOutList
def run_network(mappingOffset=0, neuronPermutation=[], noNeurons=-1):
    pynn.setup(mappingOffset=mappingOffset,
               neuronPermutation=neuronPermutation)
    if noNeurons == -1:
        noNeurons = 384
        if pynn.getChipVersion() == 4:
            noNeurons = 192
    a = pynn.Population(noNeurons, pynn.IF_facets_hardware1)
    b = pynn.Population(10, pynn.SpikeSourcePoisson)
    prj = pynn.Projection(b, a, method=pynn.AllToAllConnector())
    pynn.run(1.0)
def compareSpikesToMembrane(duration):
    """
    Tests the precise timing of digital spikes and spikes extracted from the
    membrane potential. The neuron is stimulated with Poisson spike sources.
    """
    np.random.seed(int(time.time()))
    neuronNo = np.random.random_integers(0, 191)
    print 'Using neuron number', neuronNo

    poissonParams = {'start': 100.0, 'duration': duration - 100.0,
                     'rate': 30.0}  # offset of 100 ms to get all spikes
    weightExc = 4    # digital hardware value
    weightInh = 15   # digital hardware value
    freqLimit = 1.0  # 1/s
    meanLimit = 0.2  # ms
    stdLimit = 0.2   # ms

    import pyNN.hardware.spikey as pynn
    pynn.setup(mappingOffset=neuronNo)

    stimExc = pynn.Population(64, pynn.SpikeSourcePoisson, poissonParams)
    stimInh = pynn.Population(192, pynn.SpikeSourcePoisson, poissonParams)
    neuron = pynn.Population(1, pynn.IF_facets_hardware1)
    prj = pynn.Projection(stimExc, neuron, pynn.AllToAllConnector(
        weights=weightExc * pynn.minExcWeight()), target='excitatory')
    prj = pynn.Projection(stimInh, neuron, pynn.AllToAllConnector(
        weights=weightInh * pynn.minInhWeight()), target='inhibitory')

    neuron.record()
    pynn.record_v(neuron[0], '')

    pynn.run(duration)
    spikes = neuron.getSpikes()[:, 1]
    membrane = pynn.membraneOutput
    memTime = pynn.timeMembraneOutput
    spikesMem, deriv, thresh = spikesFromMem(memTime, membrane)
    pynn.end()

    #plot(memTime, membrane, spikes, spikesMem, deriv, thresh)

    print 'Spikes and spikes on membrane:', len(spikes), '/', len(spikesMem)
    assert len(spikes) / duration * 1e3 >= freqLimit, 'Too few spikes.'
    assert len(spikes) == len(spikesMem), 'Spikes do not match membrane.'

    spikesDiff = spikesMem - spikes
    spikesDiffMean = np.mean(spikesDiff)
    spikesDiffStd = np.std(spikesDiff)
    print 'Offset between spikes and membrane:', spikesDiffMean, '+-', spikesDiffStd
    assert spikesDiffMean < meanLimit, 'Spike and membrane have too large offset.'
    assert spikesDiffStd < stdLimit, 'Time axes of spikes and membrane are different.'
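
# Sketch only: one possible implementation of the spikesFromMem() helper used
# above (the real helper is defined elsewhere in this test file). It assumes a
# spike appears as a steep negative flank (reset) in the recorded trace; the
# threshold factor is an arbitrary choice for illustration.
import numpy as np

def spikesFromMem(memTime, membrane, threshFactor=10.0):
    deriv = np.diff(membrane)                # derivative of the membrane trace
    thresh = -threshFactor * np.std(deriv)   # detection threshold on the flank
    crossings = np.where(deriv < thresh)[0]
    if len(crossings) == 0:
        return np.array([]), deriv, thresh
    # keep only the first sample of each contiguous flank
    spikeIndices = crossings[np.concatenate(([True], np.diff(crossings) > 1))]
    return memTime[spikeIndices], deriv, thresh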
def runLoop():
    global neurons, stim

    rateOutList = []
    pynn.setup()
    build(0)
    for rate in rateRange:
        poissonParams = {'start': 0, 'duration': runtime, 'rate': rate}
        stim.set(poissonParams)
        pynn.run(runtime)
        rateOutList.append(
            len(neurons.getSpikes()) / runtime * 1e3 / numNeurons)
    pynn.end()
    return rateOutList
def test_mappingOffset_and_Permutation_random():
    # example with random neuron permutator
    trials = 3

    import time
    seed = int(time.time())
    print 'seed', seed
    np.random.seed(seed)

    pynn.setup()
    chipVersion = pynn.getChipVersion()
    pynn.end()

    permutatorWorking = range(384)
    if chipVersion == 4:
        permutatorWorking = range(192, 384)

    for i in range(trials):
        np.random.shuffle(permutatorWorking)
        mappingOffset = np.random.random_integers(0, 383)
        permutator = copy.copy(permutatorWorking)
        if chipVersion == 4:
            mappingOffset = np.random.random_integers(0, 191)
            permutator = copy.copy(permutatorWorking) + range(192)

        run_network(mappingOffset=mappingOffset, neuronPermutation=permutator)

        permutator = np.array(permutator)
        should = np.concatenate(
            (permutator[mappingOffset:384], permutator[0:mappingOffset]))
        if chipVersion == 4:
            should = np.concatenate((permutator[mappingOffset:192],
                                     permutator[0:mappingOffset],
                                     np.ones(192, int) * -1))
        assert np.array_equal(pynn.hardware.hwa.hardwareIndexMap, should)

        neuronIndexMap = pynn.hardware.hwa.neuronIndexMap
        noNeurons = 384
        if chipVersion == 4:
            noNeurons = 192
        assert len(neuronIndexMap[neuronIndexMap >= 0]) == noNeurons, \
            'number of hardware neuron IDs does not match'
        assert len(neuronIndexMap[neuronIndexMap >= 0]) == noNeurons, \
            'hardware neuron IDs not adjacent in map'
        if chipVersion == 4:
            noNeurons += 1  # +1 for "-1" entries
        assert len(np.unique(neuronIndexMap)) == noNeurons, \
            'not all hardware neuron IDs in map'

        pynn.end()
def emulation(seed, connType=0, returnValue=None):
    numberNeurons = 192
    noInputs = 15

    pynn.setup()
    rngPrj = pynn.random.NumpyRNG(seed=seed, parallel_safe=True)  # this may not work?!
    neurons = pynn.Population(numberNeurons, pynn.IF_facets_hardware1)

    connector = None
    if connType == 0:
        connector = pynn.FixedNumberPreConnector(noInputs,
                                                 weights=pynn.minExcWeight())
    elif connType == 1:
        connector = pynn.FixedNumberPostConnector(noInputs,
                                                  weights=pynn.minExcWeight())
    elif connType == 2:
        connector = pynn.FixedProbabilityConnector(float(noInputs) / numberNeurons,
                                                   weights=pynn.minExcWeight())
    else:
        assert False, 'invalid connector type'
    prj = pynn.Projection(neurons, neurons, method=connector,
                          target='inhibitory', rng=rngPrj)

    connList = []
    for conn in prj.connections():
        connList.append(conn)
    assert len(connList) > 0, 'no connections'
    assert len(connList) < numberNeurons * \
        (numberNeurons - 1), 'all-to-all connection'

    pynn.run(1.0)
    pynn.end()

    if returnValue != None:
        returnValue = connList
    else:
        return connList
def emulation(doesWork):
    numberSynapses = 10
    runtime = 1000.0
    weights = range(0, numberSynapses * numberSynapses)

    pynn.setup()
    pre = pynn.Population(numberSynapses, pynn.SpikeSourcePoisson)
    post = pynn.Population(numberSynapses, pynn.IF_facets_hardware1)

    if doesWork:
        conn = pynn.Projection(pre, post, method=pynn.AllToAllConnector())
        conn.setWeights(weights)
    else:
        conn = pynn.Projection(pre, post,
                               method=pynn.AllToAllConnector(weights=weights))

    pynn.run(runtime)
    pynn.end()
def emulate():
    # pynn.setup(useUsbAdc=True)
    pynn.setup()

    stimI = pynn.Population(40, pynn.SpikeSourcePoisson, {
        'start': 0, 'duration': runtime, 'rate': rate})
    stimE = pynn.Population(20, pynn.SpikeSourcePoisson, {
        'start': 0, 'duration': runtime, 'rate': rate})
    neuron = pynn.Population(192, pynn.IF_facets_hardware1)

    prjI = pynn.Projection(stimI, neuron,
                           pynn.AllToAllConnector(weights=weight * pynn.minInhWeight()),
                           target='inhibitory')
    prjE = pynn.Projection(stimE, neuron,
                           pynn.AllToAllConnector(weights=weight * pynn.minExcWeight()),
                           target='excitatory')

    stimI.record()
    stimE.record()
    neuron.record()
    pynn.record_v(neuron[0], '')

    pynn.run(runtime)

    spikesInI = stimI.getSpikes()
    spikesInE = stimE.getSpikes()
    spikes = neuron.getSpikes()
    mem = pynn.membraneOutput

    print 'spikes out', len(spikes)
    print 'spikes in', len(spikesInI), len(spikesInE)
    print 'mem data points', len(mem)

    pynn.end()
def run(withSTDP):
    runtime = 1000.0
    pynn.setup()

    stim = pynn.Population(1, pynn.SpikeSourcePoisson, {
        'start': 0, 'duration': runtime, 'rate': 100.0})
    neuron = pynn.Population(1, pynn.IF_facets_hardware1)

    if withSTDP:
        stdp_model = pynn.STDPMechanism(
            timing_dependence=pynn.SpikePairRule(),
            weight_dependence=pynn.AdditiveWeightDependence())
        pynn.Projection(stim, neuron,
                        method=pynn.AllToAllConnector(weights=pynn.maxExcWeight()),
                        target='excitatory',
                        synapse_dynamics=pynn.SynapseDynamics(slow=stdp_model))
    else:
        pynn.Projection(stim, neuron,
                        method=pynn.AllToAllConnector(weights=pynn.maxExcWeight()),
                        target='excitatory')

    pynn.run(runtime)
    pynn.end()
def recordTauRef(self, neuronNr, icb):
    # necessary hardware setup
    p.setup(useUsbAdc=True, mappingOffset=neuronNr - 192, calibTauMem=True,
            calibVthresh=False, calibSynDrivers=False, calibIcb=False,
            workStationName=self.workstation)
    p.hardware.hwa.setIcb(icb)

    # observed neuron
    neuron = p.Population(1, p.IF_facets_hardware1, self.neuronParams)
    # stimulating population
    input = p.Population(self.inputParameters['numInputs'],
                         p.SpikeSourceArray, self.inputParameters['inputSpikes'])
    # connect input and neuron
    conn = p.AllToAllConnector(allow_self_connections=False,
                               weights=self.inputParameters['weight'])
    proj = p.Projection(input, neuron, conn, synapse_dynamics=None,
                        target='excitatory')

    # record spikes and membrane potential
    neuron.record()
    p.record_v(neuron[0], '')

    # run experiment
    p.run(self.duration)

    # evaluate results
    spikesDig = neuron.getSpikes()[:, 1]
    membrane = p.membraneOutput
    time = p.timeMembraneOutput

    # clean up
    p.end()

    # determine sampling bins
    timestep = time[1] - time[0]

    # detect analog spikes
    spikesAna, isiAna = utils.find_spikes(membrane, time, spikesDig,
                                          reportFile=self.reportFile)

    # determine refractory period from measurement of analog spikes
    tau_ref, tau_ref_err, doubles_spikes = utils.fit_tau_refrac(
        membrane, timestep, spikesAna, isiAna, noDigSpikes=len(spikesDig),
        reportFile=self.reportFile, debugPlot=self.debugPlot)

    return tau_ref, tau_ref_err, doubles_spikes, spikesDig
def runTest(self):
    import numpy as np

    column = 4
    row = 4
    n = 20                    # number of spike pairs
    deltaTList = [-1.0, 1.0]  # ms
    deltaTLimit = 0.3         # allowed deviation
    delay = 2.9               # ms (between stimulus and post)
    # at beginning in ms (should be larger than max deltaT)
    experimentOffset = 100.0
    deltaTPairs = 100.0       # time between pre-post-pairs in ms

    noStimulators = 3
    weightStimulator = 15     # weight for stimulator neurons
    weightMeasure = 0         # weight for measured neuron
    procCorrOffset = 100.0    # time after experiment until correlations are processed in ms

    for deltaT in deltaTList:
        stimulus = np.arange(experimentOffset,
                             (n - 0.5) * deltaTPairs + experimentOffset,
                             deltaTPairs)
        self.assertTrue(len(stimulus) == n)
        stimulusMeasure = stimulus + delay - deltaT

        import pyNN.hardware.spikey as pynn
        import hwconfig_default_s1v2 as default

        pynn.setup()

        if column > 0:
            pynn.Population(column, pynn.IF_facets_hardware1)
        # stimulated neuron
        neuron = pynn.Population(1, pynn.IF_facets_hardware1)

        spikeSourceStim = None
        spikeSourceMeasure = None
        # stimulators above measured synapse
        if row < noStimulators:
            if row > 0:
                dummy = pynn.Population(row, pynn.SpikeSourceArray)
            spikeSourceMeasure = pynn.Population(
                1, pynn.SpikeSourceArray, {'spike_times': stimulusMeasure})

        spikeSourceStim = pynn.Population(noStimulators, pynn.SpikeSourceArray,
                                          {'spike_times': stimulus})

        # stimulators below measured synapse
        if row >= noStimulators:
            if row > noStimulators:
                dummy = pynn.Population(row - noStimulators,
                                        pynn.SpikeSourceArray)
            spikeSourceMeasure = pynn.Population(
                1, pynn.SpikeSourceArray, {'spike_times': stimulusMeasure})

        # connect and record
        stdp_model = pynn.STDPMechanism(
            timing_dependence=pynn.SpikePairRule(),
            weight_dependence=pynn.AdditiveWeightDependence())
        pynn.Projection(
            spikeSourceStim, neuron,
            method=pynn.AllToAllConnector(weights=pynn.minExcWeight() * weightStimulator),
            target='excitatory')
        prj = pynn.Projection(
            spikeSourceMeasure, neuron,
            method=pynn.AllToAllConnector(weights=pynn.minExcWeight() * weightMeasure),
            target='excitatory',
            synapse_dynamics=pynn.SynapseDynamics(slow=stdp_model))
        neuron.record()

        #######
        # RUN #
        #######
        # correlation flags:
        # 0: no weight change
        # 1: causal weight change
        # 2: anti-causal weight change
        pynn.hardware.hwa.setLUT(
            [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])

        lastInputSpike = np.max(np.concatenate((stimulus, stimulusMeasure)))
        runtime = lastInputSpike + procCorrOffset
        pynn.hardware.hwa.autoSTDPFrequency = runtime
        print 'runtime: ' + str(runtime) + '; last input spike: ' + str(
            lastInputSpike) + '; STDP readout: ' + str(runtime)
        pynn.run(runtime)

        # get flag and spikes
        corrFlag = (
            np.array(prj.getWeightsHW(readHW=True, format='list')) /
            pynn.minExcWeight())[0]
        spikes = neuron.getSpikes()[:, 1]
        print 'stimulus:', stimulus
        print 'measure:', stimulusMeasure
        print 'post:', spikes
        self.assertTrue(
            len(stimulusMeasure) == len(spikes), 'No proper spiking!')
        print 'correlation flag: ' + str(corrFlag)
        print 'deltaT (is / should / limit):', np.mean(
            spikes - stimulusMeasure), '/', deltaT, '/', deltaTLimit
        self.assertTrue(
            abs(np.mean(spikes - stimulusMeasure) - deltaT) <= deltaTLimit,
            'No precise spiking!')
        if deltaT > 0:  # causal
            self.assertTrue(corrFlag == 1, 'Wrong correlation flag!')
        else:  # anti-causal
            self.assertTrue(corrFlag == 2, 'Wrong correlation flag!')

        pynn.end()
def runTest(self):
    with_figure = False
    import numpy
    import pyNN.hardware.spikey as pynn
    if with_figure:
        import pylab

    # some test parameters
    neuron_param_even = {
        'g_leak': 1.0,       # nS
        'tau_syn_E': 5.0,    # ms
        'tau_syn_I': 5.0,    # ms
        'v_reset': -100.0,   # mV
        'e_rev_I': -100.0,   # mV
        'v_rest': -65.0,     # mV
        'v_thresh': -62.0    # mV
    }
    neuron_param_uneven = {
        'g_leak': 1.0,       # nS
        'tau_syn_E': 5.0,    # ms
        'tau_syn_I': 5.0,    # ms
        'v_reset': -100.0,   # mV
        'e_rev_I': -100.0,   # mV
        'v_rest': -65.0,     # mV
        'v_thresh': 0.0      # mV
    }
    stim_offset = 100.0   # ms
    stim_isi = 500.0      # ms
    stim_num = 10         # number of external input spikes
    stim_weight = 8.0     # in units of pynn.minExcWeight
    stim_pop_size = 10    # size of stimulating population
    duration = stim_offset + ((stim_num + 1) * stim_isi)

    # neuron order: {0, 2, ..., 190, 1, 3, ..., 191, 192, 193, ..., 383}
    neuron_order = range(0, 191, 2) + range(1, 192, 2) + range(192, 384, 1)
    if with_figure:
        pynn.setup(neuronPermutation=neuron_order, useUsbAdc=True)
    else:
        pynn.setup(neuronPermutation=neuron_order)

    # create the population with an even hardware neuron index
    even_population = pynn.Population(96, pynn.IF_facets_hardware1,
                                      neuron_param_even)
    # create the population with an uneven hardware neuron index
    uneven_population = pynn.Population(96, pynn.IF_facets_hardware1,
                                        neuron_param_uneven)
    if with_figure:
        pynn.record_v(even_population[0], '')

    # create the external stimulus
    stim_times = numpy.arange(stim_offset, stim_num * stim_isi, stim_isi)
    stim_pop = pynn.Population(stim_pop_size, pynn.SpikeSourceArray,
                               {'spike_times': stim_times})

    # connect the external stimulus
    stim_con = pynn.AllToAllConnector(weights=stim_weight * pynn.minExcWeight())
    stim_prj_even = pynn.Projection(stim_pop, even_population, stim_con)
    stim_prj_uneven = pynn.Projection(stim_pop, uneven_population, stim_con)

    # record spikes of all involved neurons
    even_population.record()
    uneven_population.record()

    # run the emulation
    pynn.run(duration)

    # get the spike data
    pre_swap_spikes_even = even_population.getSpikes()
    pre_swap_spikes_uneven = uneven_population.getSpikes()
    if with_figure:
        plotVoltageAndSpikes(pylab, pynn.timeMembraneOutput,
                             pynn.membraneOutput,
                             pre_swap_spikes_even, pre_swap_spikes_uneven)

    # swap the configurations
    pynn.set(even_population[0], pynn.IF_facets_hardware1, {'v_thresh': 0.0})
    pynn.set(uneven_population[0], pynn.IF_facets_hardware1, {'v_thresh': -62.0})

    # run the emulation
    pynn.run(duration)

    # get the spike data
    pst_swap_spikes_even = even_population.getSpikes()
    pst_swap_spikes_uneven = uneven_population.getSpikes()
    if with_figure:
        plotVoltageAndSpikes(pylab, pynn.timeMembraneOutput,
                             pynn.membraneOutput,
                             pst_swap_spikes_even, pst_swap_spikes_uneven)

    pre_spikes_count_even = float(len(pre_swap_spikes_even[:, 0]))
    pre_spikes_count_uneven = float(len(pre_swap_spikes_uneven[:, 0]))
    pst_spikes_count_even = float(len(pst_swap_spikes_even[:, 0]))
    pst_spikes_count_uneven = float(len(pst_swap_spikes_uneven[:, 0]))

    # let's see what we've got
    assert (pre_spikes_count_even > 0)
    assert (pst_spikes_count_uneven > 0)
    assert (pre_spikes_count_uneven / pre_spikes_count_even < 0.01)
    assert (pst_spikes_count_even / pst_spikes_count_uneven < 0.01)
    assert (pre_spikes_count_uneven / pst_spikes_count_uneven < 0.01)
    assert (pst_spikes_count_even / pre_spikes_count_even < 0.01)
def emulate(self, driverIndexExc, driverIndexInh=None, drvirise=None,
            drvifallExc=None, drvifallInh=None, drvioutExc=None,
            drvioutInh=None, filename=None, calibSynDrivers=False):
    '''Run emulations on hardware.'''
    assert self.stimParams != None, 'specify stimulus first'

    pynn.setup(calibTauMem=True, calibSynDrivers=calibSynDrivers,
               calibVthresh=False, calibIcb=False,
               workStationName=self.workstation, writeConfigToFile=False)

    # create neurons
    self.neuronList.sort()
    neuronCollector = []
    currentIndex = 0
    for neuronIndex in self.neuronList:
        if neuronIndex > currentIndex:  # insert dummy neurons if neuronList not continuous
            dummyPopSize = neuronIndex - currentIndex
            dummy = pynn.Population(dummyPopSize, pynn.IF_facets_hardware1,
                                    self.neuronParams)
            currentIndex += dummyPopSize
            self.logger.debug('inserted ' + str(dummyPopSize) + ' dummy neurons')
        if neuronIndex == currentIndex:
            neuron = pynn.Population(1, pynn.IF_facets_hardware1,
                                     self.neuronParams)
            currentIndex += 1
            neuron.record()
            neuronCollector.append(neuron)
        else:
            raise Exception('Could not create all neurons')

    # create input and connect to neurons
    synDrivers = [[driverIndexExc, 'excitatory'], [driverIndexInh, 'inhibitory']]
    synDrivers.sort()

    currentIndex = 0
    for synDriver in synDrivers:
        toInsertIndex = synDriver[0]
        targetType = synDriver[1]
        if toInsertIndex == None:
            self.logger.debug('skipping ' + targetType + ' stimulation')
            continue
        if toInsertIndex > currentIndex:
            dummyPopSize = toInsertIndex - currentIndex
            dummy = pynn.Population(dummyPopSize, pynn.SpikeSourcePoisson)
            currentIndex += dummyPopSize
            self.logger.debug('inserted ' + str(dummyPopSize) + ' dummy synapse drivers')

        stim = pynn.Population(1, pynn.SpikeSourcePoisson,
                               self.stimParams[targetType])
        self.logger.debug('inserted 1 stimulus of type ' + targetType +
                          ' with rate ' + str(self.stimParams[targetType]['rate']))

        if targetType == 'excitatory':
            hardwareWeightTemp = self.hardwareWeight * pynn.minExcWeight()
        elif targetType == 'inhibitory':
            hardwareWeightTemp = self.hardwareWeight * pynn.minInhWeight()
        else:
            raise Exception('Synapse type not supported!')

        for neuron in neuronCollector:
            pynn.Projection(stim, neuron,
                            method=pynn.AllToAllConnector(weights=hardwareWeightTemp),
                            target=targetType)
        currentIndex += 1

    # set custom parameters
    if drvirise != None:
        pynn.hardware.hwa.drvirise = drvirise
    else:
        pynn.hardware.hwa.drvirise = default.drvirise
    if drvifallExc != None:
        pynn.hardware.hwa.drvifall_base['exc'] = drvifallExc
    else:
        pynn.hardware.hwa.drvifall_base['exc'] = default.drvifall_base['exc']
    if drvifallInh != None:
        pynn.hardware.hwa.drvifall_base['inh'] = drvifallInh
    else:
        pynn.hardware.hwa.drvifall_base['inh'] = default.drvifall_base['inh']
    if drvioutExc != None:
        pynn.hardware.hwa.drviout_base['exc'] = drvioutExc
    else:
        pynn.hardware.hwa.drviout_base['exc'] = default.drviout_base['exc']
    if drvioutInh != None:
        pynn.hardware.hwa.drviout_base['inh'] = drvioutInh
    else:
        pynn.hardware.hwa.drviout_base['inh'] = default.drviout_base['inh']

    # run
    pynn.run(self.runtime)
    #temperature = pynn.hardware.hwa.getTemperature()
    #self.logger.debug('temperature ' + str(temperature) + ' degree Celsius')

    # obtain firing rate
    spikes = None
    neuronCount = 0
    for neuron in neuronCollector:
        spikesNeuron = neuron.getSpikes()
        neuronCount += neuron.size
        if spikes == None:
            spikes = spikesNeuron
        else:
            spikes = np.concatenate((spikes, spikesNeuron))
    if len(spikes) > 0:
        spikes = spikes[spikes[:, 1] > self.cutfirst]
    if len(spikes) > 0:
        spikes = spikes[spikes[:, 1] <= self.runtime]

    if filename != None:
        np.savetxt(os.path.join(self.folder, filename), spikes)

    spikesPerNeuron = float(len(spikes)) / neuronCount
    pynn.end()

    rate = spikesPerNeuron * 1e3 / (self.runtime - self.cutfirst)
    return rate
See also:
    Pfeil et al. (2014). The effect of heterogeneity on decorrelation
    mechanisms in spiking neural networks: a neuromorphic-hardware study.
    arXiv:1411.7916 [q-bio.NC].
"""

# for plotting without X-server
import matplotlib as mpl
mpl.use("Agg")

import pyNN.hardware.spikey as pynn
import numpy as np

pynn.setup()

# set resting potential over spiking threshold
runtime = 1000.0  # ms
popSize = 192
weight = 7.0 * pynn.minExcWeight()
neuronParams = {"v_rest": -40.0}

neurons = pynn.Population(popSize, pynn.IF_facets_hardware1, neuronParams)
pynn.Projection(neurons, neurons,
                pynn.FixedNumberPreConnector(15, weights=weight),
                target="inhibitory")
neurons.record()

pynn.run(runtime)
spikes = neurons.getSpikes()
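
# Follow-up sketch (not from the original script): summarize the firing rate
# and write a raster plot to file, consistent with the Agg backend selected
# above; the output filename is an arbitrary choice.
import matplotlib.pyplot as plt

# population firing rate in 1/s; spikes has columns [neuron index, spike time in ms]
rate = float(len(spikes)) / popSize / runtime * 1e3
print 'mean firing rate:', rate, '1/s'

plt.figure()
plt.plot(spikes[:, 1], spikes[:, 0], '.', markersize=1)
plt.xlabel('time [ms]')
plt.ylabel('neuron index')
plt.savefig('decorr_raster.png')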
def calib(self):
    self.result['datetime'] = dt.datetime.now()
    self.result['temperature'] = 'TODO'
    self.result['person'] = pwd.getpwuid(os.getuid()).pw_name

    # one setup is necessary in order to determine spikey version
    pynn.setup(workStationName=self.workstation, calibOutputPins=False,
               calibNeuronMems=False, calibTauMem=False, calibSynDrivers=False,
               calibVthresh=False, calibBioDynrange=False)
    self.chipVersion = pynn.hardware.chipVersion()

    for block in range(2):
        if not self.chipVersion == 4 or block == 1:
            for pin in range(4):
                lower = -90.
                upper = -40.
                step = (upper - lower) / self.numVsteps
                for vrest in numpy.arange(lower, upper + step / 2., step):
                    pin_in = numpy.nan
                    pin_out = numpy.nan
                    for pinBlock in range(self.numPinBlocks):
                        neuron = block * 192 + pinBlock * 4 + pin

                        # necessary setup
                        mappingOffset = neuron
                        if self.chipVersion == 4:
                            mappingOffset = neuron % 192
                        pynn.setup(useUsbAdc=True, workStationName=self.workstation,
                                   mappingOffset=mappingOffset,
                                   rng_seeds=self.seeds, avoidSpikes=True,
                                   calibOutputPins=False, calibNeuronMems=False,
                                   calibTauMem=False, calibSynDrivers=False,
                                   calibVthresh=False)
                        self.neuronParams['v_rest'] = vrest

                        # set up network
                        n = pynn.create(pynn.IF_facets_hardware1, self.neuronParams, n=1)
                        pynn.record_v(n, '')
                        pynn.record(n, '')

                        # http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
                        traceSum = 0.0
                        traceSumSqr = 0.0
                        # execute the experiment in a loop
                        for i in range(self.numRuns):
                            pynn.run(self.duration, translateToBioVoltage=False)
                            if i == 0:
                                pin_in = pynn.hardware.hwa.vouts[neuron / 192, neuron % 2 + 2]
                            else:
                                assert pin_in == pynn.hardware.hwa.vouts[neuron / 192, neuron % 2 + 2], 'vout should not differ'
                            mem = pynn.membraneOutput
                            memMean = mem.mean()
                            traceSum += memMean
                            traceSumSqr += numpy.power(memMean, 2)

                            noSpikes = len(pynn.spikeOutput[1])
                            if not float(noSpikes) / self.duration * 1e3 == 0:
                                self.noSpikesTotal += noSpikes
                                print 'there are', noSpikes, 'spikes on the membrane (most likely / hopefully ghost spikes)'
                                assert mem.std() < self.limMemStd, 'digital spikes and spikes on the membrane found!'

                        pin_out = traceSum / self.numRuns
                        pin_out_std = (traceSumSqr - (numpy.power(traceSum, 2) / self.numRuns)) / (self.numRuns - 1)
                        pynn.end()

                        print 'For neuron', neuron, 'the written voltage', pin_in, 'appeared on the scope as', pin_out, '/2'

                        # save raw data
                        newData = numpy.vstack((neuron, pin_in, pin_out, numpy.sqrt(pin_out_std))).T
                        if self.rawData == None:
                            self.rawData = newData
                        else:
                            self.rawData = numpy.vstack((self.rawData, newData))

    def filter_and_fit(dataset):
        # filter data
        dataset = numpy.atleast_2d(dataset)
        dataToFit = numpy.atleast_2d(dataset[dataset[:, 1] >= self.voltageLimLow])
        dataToFit = numpy.atleast_2d(dataToFit[dataToFit[:, 1] <= self.voltageLimHigh])
        noPins = len(numpy.unique(numpy.array(dataset[:, 0] / 192, numpy.int) * 4 + dataset[:, 0] % 4))
        assert (len(dataset) - len(dataToFit)) % noPins == 0, 'discarding data failed'
        print 'discarded', (len(dataset) - len(dataToFit)) / noPins, 'data points'
        # fit polynomial
        return numpy.polyfit(dataToFit[:, 2], dataToFit[:, 1], self.polyDegree)

    for block in range(2):
        if self.chipVersion == 4 and block == 0:
            continue
        for pin in range(4):
            # data for output pin calibration
            dataOnePin = self.rawData[numpy.array(self.rawData[:, 0] / 192, numpy.int) * 4 + self.rawData[:, 0] % 4 == block * 4 + pin]

            # calculate mean over neurons with same pin
            vouts = numpy.unique(dataOnePin[:, 1])
            mean = []
            std = []
            for vout in vouts:
                mean.append(numpy.mean(dataOnePin[dataOnePin[:, 1] == vout][:, 2]))
                std.append(numpy.std(dataOnePin[dataOnePin[:, 1] == vout][:, 2]))
            dataOnePinMean = numpy.vstack((numpy.zeros_like(vouts), vouts, mean, std)).T
            self.result['polyFitOutputPins']['pin' + str(block * 4 + pin)] = filter_and_fit(dataOnePinMean)

            for pinBlock in range(self.numPinBlocks):
                neuron = block * 192 + pinBlock * 4 + pin
                # data for membrane calibration of single neurons
                dataOneNeuron = self.rawData[self.rawData[:, 0] == neuron]
                self.result['polyFitNeuronMems']['neuron' + str(neuron).zfill(3)] = filter_and_fit(dataOneNeuron)

    print 'total number of spikes', self.noSpikesTotal, 'in', len(numpy.unique(numpy.array(self.rawData[:, 0] / 192, numpy.int) * 4 + self.rawData[:, 0] % 4)) * (self.numVsteps + 1) * self.numPinBlocks * self.numRuns, 'runs'

    return self.result, self.rawData
import pyNN.hardware.spikey as pynn
import numpy as np
import matplotlib.pyplot as plt

noStims = 64           # number of stimuli generated on the host computer
noNeurons = 32         # number of hardware neurons
noInputs = 16          # number of stimuli connected to each neuron
weight = 7.0           # synaptic weight in digital values
rateStim = 10.0        # rate of each stimulus in 1/s
runtime = 10 * 1000.0  # runtime in biological time domain in ms
gLeakList = np.arange(2, 251, 8)  # hardware range with calibTauMem turned off: [2,250] micro siemens
resultCollector = []

pynn.setup(calibTauMem=False)  # turn off calibration of membrane time constant tau_mem

# build network
stimuli = pynn.Population(noStims, pynn.SpikeSourcePoisson,
                          {'start': 0, 'duration': runtime, 'rate': rateStim})
neurons = pynn.Population(noNeurons, pynn.IF_facets_hardware1)
pynn.Projection(stimuli, neurons,
                pynn.FixedNumberPreConnector(noInputs, weights=weight * pynn.minExcWeight()),
                target='excitatory')
neurons.record()

# sweep over g_leak values, emulate network and record spikes
for gLeakValue in gLeakList:
    neurons.set({'g_leak': gLeakValue})
    pynn.run(runtime)
    resultCollector.append([gLeakValue,
                            float(len(neurons.getSpikes())) / noNeurons / runtime * 1e3])
pynn.end()

# plot results
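# Sketch of the plotting step announced by the comment above; the column
# layout follows resultCollector (g_leak value, mean rate per neuron in 1/s),
# and the output filename is an arbitrary choice.
results = np.array(resultCollector)
plt.figure()
plt.plot(results[:, 0], results[:, 1], 'o-')
plt.xlabel('g_leak (hardware value)')
plt.ylabel('mean firing rate per neuron [1/s]')
plt.savefig('rate_vs_gleak.png')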
import pyNN.hardware.spikey as pynn
import numpy as np

runtime = 500.0  # ms
noPops = 15      # chain length #9
popSize = {'exc': 6, 'inh': 6}  # size of each chain link #exc 10, inh 10

# connection probabilities
probExcExc = 1.0
probExcInh = 1.0
probInhExc = 1.0

# refractory period of neurons can be tuned for optimal synfire chain behavior
neuronParams = {'tau_refrac': 10.0}

pynn.setup()

# define weights in digital hardware values
# --> these should be tuned first to obtain synfire chain behavior!
weightStimExcExc = 12 * pynn.minExcWeight()  # 12
weightStimExcInh = 12 * pynn.minExcWeight()  # 12
weightExcExc = 13 * pynn.minExcWeight()      # 8
weightExcInh = 14 * pynn.minExcWeight()      # 10
weightInhExc = 9 * pynn.minInhWeight()       # 7

# kick starter input pulse(s)
#stimSpikes = np.array([100.0])
# to have several kick starter pulses, use (don't forget to reduce to first
# entry for closed chain):
stimSpikes = np.array([100.0, 200.0, 300.0])
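
# Sketch only: the excerpt stops after the stimulus definition, so the wiring
# below is one common way to build the chain from the parameters above. The
# population/projection names and the closed-chain wiring are illustrative and
# may differ from the original script.
stim = pynn.Population(popSize['exc'], pynn.SpikeSourceArray,
                       {'spike_times': stimSpikes})

chain = []  # list of (excitatory, inhibitory) populations, one pair per link
for i in range(noPops):
    popExc = pynn.Population(popSize['exc'], pynn.IF_facets_hardware1, neuronParams)
    popInh = pynn.Population(popSize['inh'], pynn.IF_facets_hardware1, neuronParams)
    popExc.record()
    chain.append((popExc, popInh))

# kick-start the first link
pynn.Projection(stim, chain[0][0],
                pynn.FixedProbabilityConnector(probExcExc, weights=weightStimExcExc),
                target='excitatory')
pynn.Projection(stim, chain[0][1],
                pynn.FixedProbabilityConnector(probExcInh, weights=weightStimExcInh),
                target='excitatory')

# feed-forward connections between consecutive links, with local inhibition
for i in range(noPops):
    excPre, inhPre = chain[i]
    excPost, inhPost = chain[(i + 1) % noPops]  # modulo closes the chain
    pynn.Projection(excPre, excPost,
                    pynn.FixedProbabilityConnector(probExcExc, weights=weightExcExc),
                    target='excitatory')
    pynn.Projection(excPre, inhPost,
                    pynn.FixedProbabilityConnector(probExcInh, weights=weightExcInh),
                    target='excitatory')
    pynn.Projection(inhPre, excPre,
                    pynn.FixedProbabilityConnector(probInhExc, weights=weightInhExc),
                    target='inhibitory')

pynn.run(runtime)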
# overwrite possibly existing data?
overwrite_data = True

if len(sys.argv) < 4:
    raise Exception('provide arguments [station] [filename results] [filename raw data]')
workstation = sys.argv[1]
filenameResult = sys.argv[2]
filenameRawData = sys.argv[3]
# TODO: add report to JSON file for raw data; up to then store in same folder as raw data
filenameReport = os.path.join(os.path.splitext(filenameRawData)[0], 'report.dat')

import pyNN.hardware.spikey as pynn
pynn.setup(workStationName=workstation)
chipVersion = pynn.getChipVersion()
pynn.end()
if chipVersion == 4:
    neuronIDs = range(192, 384)

##############################################################################
# initialize calibrator object for given neurons and parameters
clb = calibrator.Calibrator_TauMem(neuronIDs=neuronIDs, workstation=workstation,
                                   numValues=numValues, iLeakRange=iLeakRange,
                                   filenameResult=filenameResult,
                                   filenameRawData=filenameRawData,
                                   overwrite_data=overwrite_data,
                                   chipVersion=chipVersion,
                                   reportFile=filenameReport)

startTime = time.time()
# run calibration
clb.calib()
# fit tau_mem vs iLeak
startTime = time.time()

if len(sys.argv) > 1:
    workstation = sys.argv[1]
else:
    workstation = None

if len(sys.argv) > 2:
    filenameRawData = sys.argv[2]
else:
    filenameRawData = './voutRaw.dat'

# necessary setup
pynn.setup(timestep=1.0, useUsbAdc=False, workStationName=workstation,
           calibOutputPins=False, calibIcb=False, calibTauMem=False,
           calibSynDrivers=False, calibVthresh=False, rng_seeds=[1234])

####################################################################
# experiment parameters                                            #
####################################################################
neuron = pynn.Population(1, pynn.IF_facets_hardware1)
neuron.record()

pynn.run(1000.0)
pynn.hardware.hwa.sp.autocalib(pynn.hardware.hwa.cfg, filenameRawData)

endTime = time.time()
print 'vouts calibration took', (endTime - startTime) / 60., 'minutes'
print 'please commit results in "*/spikeyhal/spikeycalib.xml" (ignore the below warning that this file does not exist)'
noSpikePairs = 20            # number of spike pairs
timingPrePostPlastic = 1.0   # timing between pre- and postsynaptic spikes at plastic synapse in ms
intervalPairs = 100.0        # time interval between presynaptic spikes in ms

noStim = 3                   # number of synapses to stimulate spiking of postsynaptic neuron
weightStim = 8.0             # weight of stimulating synapses
timingPrePostStim = 3.4      # timing between pre- and postsynaptic spikes of stimulating synapses in ms

spikePrecision = 0.3         # limit of precision of spiking in ms
stimulusOffset = 100.0       # offset from beginning and end of emulation in ms (should be larger than timingPrePostPlastic)

# prepare stimuli
stimulus = np.arange(stimulusOffset,
                     (noSpikePairs - 0.5) * intervalPairs + stimulusOffset,
                     intervalPairs)
stimulusPlastic = stimulus + timingPrePostStim - timingPrePostPlastic
assert len(stimulus) == noSpikePairs

pynn.setup(mappingOffset=column)

# create postsynaptic neuron
neuron = pynn.Population(1, pynn.IF_facets_hardware1)

spikeSourceStim = None
spikeSourcePlastic = None
# place stimulating synapses above plastic synapse
if row < noStim:
    if row > 0:
        dummy = pynn.Population(row, pynn.SpikeSourceArray)
    spikeSourcePlastic = pynn.Population(1, pynn.SpikeSourceArray,
                                         {"spike_times": stimulusPlastic})

# create stimulating inputs
spikeSourceStim = pynn.Population(noStim, pynn.SpikeSourceArray,
                                  {"spike_times": stimulus})

# place stimulating synapses below plastic synapse
if row >= noStim:
# have to reduce the neuron count in the decision pop to fit in five digits,
#config['network']['num_dec_neurons'] = '6'
#config['network']['num_inh_dec_neurons'] = '6'

import neuclar.data.mnist as mnist
import neuclar.vrconvert as vrconvert
from timings import NeuclarTimings

start_time = time.time()

# doing the stuff
perm = numpy.random.permutation(192) + 192
perm = numpy.concatenate((perm, numpy.arange(192)))
setupargs = dict(writeConfigToFile=False, neuronPermutation=list(perm))
if not (workstation is None):
    setupargs['workStationName'] = workstation
p.setup(**setupargs)
setup_time = time.time()

# load data
training_data, training_labels = mnist.get_training_data(digits, num_data_samples)
testing_data, testing_labels = mnist.get_training_data(digits, num_data_samples)
load_data_time = time.time()

# convert data with VRs
if retrain:
    posfilename = "vrpos-{}-{}.npy".format("".join(['{}'.format(d) for d in digits]),
                                           time.strftime("%Y-%m-%d-%H-%M-%S"))
    lg.info("computing new VR positions, storing them to {}".format(posfilename))
    vrs = vrconvert.NeuralGasSampler()
    vrs.train_sampler(numpy.array(training_data, dtype=float),
                      vrconvert.neural_gas_parameters)
#!/usr/bin/env python

import pyNN.hardware.spikey as pynn
import numpy as np
import matplotlib.pyplot as plt

weight = 7.0              # synaptic weight in digital values
runtime = 10 * 1000.0     # runtime in biological time domain in ms
durationInterval = 200.0  # interval between input spikes in ms
neuronIndex = 42          # choose neuron on chip in range(384)
synapseDriverIndex = 42   # choose synapse driver in range(256)

pynn.setup(mappingOffset=neuronIndex, calibSynDrivers=False)  # turn off calibration of synapse line drivers

## build network
neurons = pynn.Population(1, pynn.IF_facets_hardware1)
pynn.record_v(neurons[0], '')

# allocate dummy synapse drivers sending no spikes
if synapseDriverIndex > 0:
    stimuliDummy = pynn.Population(synapseDriverIndex, pynn.SpikeSourceArray,
                                   {'spike_times': []})
    prj = pynn.Projection(stimuliDummy, neurons,
                          pynn.AllToAllConnector(weights=0),
                          target='excitatory')

# allocate synapse driver and configure spike times
stimProp = {'spike_times': np.arange(durationInterval,
                                     runtime - durationInterval,
                                     durationInterval)}
stimuli = pynn.Population(1, pynn.SpikeSourceArray, stimProp)
prj = pynn.Projection(stimuli, neurons,
                      pynn.AllToAllConnector(weights=weight * pynn.minExcWeight()),
                      target='excitatory')

# modify properties of synapse driver
print 'Range of calibration factors of drvifall for excitatory connections', prj.getDrvifallFactorsRange('exc')
prj.setDrvifallFactors([0.8])
#prj.setDrvioutFactors([1.0])
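
# Sketch of the remaining steps (not part of the excerpt above): run the
# emulation and inspect the recorded membrane trace to see how the modified
# drvifall factor shapes the EPSPs; the output filename is an arbitrary choice.
pynn.run(runtime)
mem = pynn.membraneOutput
memTime = pynn.timeMembraneOutput
pynn.end()

plt.figure()
plt.plot(memTime, mem)
plt.xlabel('time [ms]')
plt.ylabel('membrane potential [mV]')
plt.savefig('epsp_drvifall_0.8.png')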