def compute_latencies(folder, suffix, contains, outName):
    '''Compute first-spike latencies relative to stimulus onset and write
    them to a CSV file.

    Scans `folder` for spike-time files matching `suffix`/`contains`,
    reads per-trial spike times, and records the latency (tSpike - tStim)
    of the first spike falling in the response window
    (tStim, tStim + tWindow]. At most one latency is recorded per trial.
    Output goes to `outName` ('.csv' appended if missing): a header line
    with the total trial count, then one latency per line.
    '''
    fnames = []
    scan_directory2(folder, fnames, suffix, contains)
    print('Computing latencies from %d files' % len(fnames))
    tStim = 245.0   # stimulus onset (ms) -- presumably simulation time; confirm against protocol
    tWindow = 25.0  # response window length (ms)
    allLatencies = []
    nTrials = 0
    for fname in fnames:
        print('Loading spike times from file %s' % fname)
        spikeTimes = scp.read_spike_times_file(fname)
        for trial in spikeTimes.keys():
            nTrials += 1
            for tSpike in spikeTimes[trial]:
                if tStim < tSpike <= tStim + tWindow:
                    allLatencies.append(tSpike - tStim)
                    break  # only the first spike per trial counts
    if not outName.endswith('.csv'):
        outName += '.csv'
    # Write line by line instead of accumulating one large string
    # (the original built the whole file via repeated string concatenation).
    with open(outName, 'w') as outFile:
        outFile.write('# Latencies in %d trials\n' % nTrials)
        for latency in allLatencies:
            outFile.write(str(latency) + '\n')
def create_active_synapse_histogram_spike_no_spike(folder, suffix, vmSuffix, contains, outName):
    '''Build PSTHs of active synapse times per presynaptic cell type, split
    into trials with and without an early evoked spike.

    Loads synapse activation files (matched via `suffix`) and spike-time
    files (matched via `vmSuffix`) from `folder`, classifies each trial as
    spike/no-spike based on spikes in [tStim, tStim + earlyWindow), and
    accumulates synapse activation times per cell type and dendritic
    structure (only synapses with soma distance < 500 um). Three
    tab-separated tables are written:
    outName + '_all_trials.csv' / '_spike_trials.csv' / '_no_spike_trials.csv'.
    '''
    excTypes = ('L2', 'L34', 'L4py', 'L4sp', 'L4ss', 'L5st', 'L5tt',
                'L6cc', 'L6ccinv', 'L6ct', 'VPM')
    inhTypes = ('L1', 'L23Trans', 'L45Sym', 'L45Peak', 'L56Trans',
                'SymLocal1', 'SymLocal2', 'SymLocal3', 'SymLocal4', 'SymLocal5', 'SymLocal6')
    # Column order of the output tables (per-type plus pooled EXC/INH).
    plotTypes = ('L2', 'L34', 'L4py', 'L4sp', 'L4ss', 'L5st', 'L5tt',
                 'L6cc', 'L6ccinv', 'L6ct', 'VPM', 'EXC', 'INH')
    vmNames = []
    scan_directory2(folder, vmNames, vmSuffix, contains)
    synNames = []
    scan_directory2(folder, synNames, suffix, contains)
    nrOfFiles = len(synNames)
    print 'Creating active synapse plots from %d files' % nrOfFiles
    # Pre-load all synapse activation files, keyed by filename.
    synData = {}
    for fname in synNames:
        activeSyns = scp.read_complete_synapse_activation_file(fname)
        synData[fname] = activeSyns
    # One list-of-trials per cell type and structure; 'Total' pools
    # ApicalDendrite + Dendrite (+ Soma for INH).
    synapseTimes = {}
    spikeTrialSyns = {}
    noSpikeTrialSyns = {}
    for excType in excTypes:
        synapseTimes[excType] = {
            'ApicalDendrite': [],
            'Dendrite': [],
            'Total': []
        }
        spikeTrialSyns[excType] = {
            'ApicalDendrite': [],
            'Dendrite': [],
            'Total': []
        }
        noSpikeTrialSyns[excType] = {
            'ApicalDendrite': [],
            'Dendrite': [],
            'Total': []
        }
    synapseTimes['EXC'] = {'ApicalDendrite': [], 'Dendrite': [], 'Total': []}
    synapseTimes['INH'] = {
        'ApicalDendrite': [],
        'Dendrite': [],
        'Soma': [],  # inhibitory synapses may also target the soma
        'Total': []
    }
    spikeTrialSyns['EXC'] = {'ApicalDendrite': [], 'Dendrite': [], 'Total': []}
    spikeTrialSyns['INH'] = {
        'ApicalDendrite': [],
        'Dendrite': [],
        'Soma': [],
        'Total': []
    }
    noSpikeTrialSyns['EXC'] = {
        'ApicalDendrite': [],
        'Dendrite': [],
        'Total': []
    }
    noSpikeTrialSyns['INH'] = {
        'ApicalDendrite': [],
        'Dendrite': [],
        'Soma': [],
        'Total': []
    }
    tStim = 253.0      # stimulus onset (ms)
    earlyWindow = 17.0  # early response window length (ms)
    # Per vm file: number of trials, per-trial early spike times, and a
    # per-trial boolean flag (spike in early window yes/no).
    trials = []
    trialSpikeTimes = [[] for j in range(len(vmNames))]
    trialWithSpikes = {}
    for n in range(len(vmNames)):
        fname = vmNames[n]
        print 'Loading spike times from file %s' % fname
        trialWithSpikes[n] = []
        trialSpikeTimes_ = scp.read_spike_times_file(fname)
        nrOfTrials = len(trialSpikeTimes_.keys())
        trials.append(nrOfTrials)
        for trial in trialSpikeTimes_.keys():
            trialSpikeTimes[n].append([])
            trialWithSpikes_ = False
            for tSpike in trialSpikeTimes_[trial]:
                if tSpike >= tStim and tSpike < tStim + earlyWindow:
                    trialSpikeTimes[n][-1].append(tSpike - tStim)
                    trialWithSpikes_ = True
            trialWithSpikes[n].append(trialWithSpikes_)
    for n in range(len(vmNames)):
        # NOTE(review): these three counters are never read afterwards.
        nrSpikeTrials = 0
        nrNoSpikeTrials = 0
        earlyProxSyns = 0
        for trialNr in range(trials[n]):
            print 'Counting active synapses in trial %d of %d\r' % (
                trialNr + 1, trials[n]),
            sys.stdout.flush()
            # Match the synapse activation file that belongs to this trial
            # and lives in the same directory as the vm file.
            synTrialStr = 'simulation_run%04d_synapses.csv' % trialNr
            synTrialFile = ''
            tmpVmName = vmNames[n]
            for name in synNames:
                if synTrialStr in name and os.path.split(
                        tmpVmName)[0] == os.path.split(name)[0]:
                    synTrialFile = name
                    break
            if synTrialFile == '':
                errstr = 'Could not find synapse activation file for trial nr. %d' % trialNr
                raise RuntimeError(errstr)
            activeSyns = synData[synTrialFile]
            # Start a fresh per-trial list for every type/structure;
            # spike and no-spike containers grow mutually exclusively.
            for excType in excTypes:
                synapseTimes[excType]['ApicalDendrite'].append([])
                synapseTimes[excType]['Dendrite'].append([])
                synapseTimes[excType]['Total'].append([])
                if trialWithSpikes[n][trialNr]:
                    spikeTrialSyns[excType]['ApicalDendrite'].append([])
                    spikeTrialSyns[excType]['Dendrite'].append([])
                    spikeTrialSyns[excType]['Total'].append([])
                else:
                    noSpikeTrialSyns[excType]['ApicalDendrite'].append([])
                    noSpikeTrialSyns[excType]['Dendrite'].append([])
                    noSpikeTrialSyns[excType]['Total'].append([])
            synapseTimes['EXC']['ApicalDendrite'].append([])
            synapseTimes['EXC']['Dendrite'].append([])
            synapseTimes['EXC']['Total'].append([])
            synapseTimes['INH']['ApicalDendrite'].append([])
            synapseTimes['INH']['Dendrite'].append([])
            synapseTimes['INH']['Soma'].append([])
            synapseTimes['INH']['Total'].append([])
            if trialWithSpikes[n][trialNr]:
                spikeTrialSyns['EXC']['ApicalDendrite'].append([])
                spikeTrialSyns['EXC']['Dendrite'].append([])
                spikeTrialSyns['EXC']['Total'].append([])
                spikeTrialSyns['INH']['ApicalDendrite'].append([])
                spikeTrialSyns['INH']['Dendrite'].append([])
                spikeTrialSyns['INH']['Soma'].append([])
                spikeTrialSyns['INH']['Total'].append([])
            else:
                noSpikeTrialSyns['EXC']['ApicalDendrite'].append([])
                noSpikeTrialSyns['EXC']['Dendrite'].append([])
                noSpikeTrialSyns['EXC']['Total'].append([])
                noSpikeTrialSyns['INH']['ApicalDendrite'].append([])
                noSpikeTrialSyns['INH']['Dendrite'].append([])
                noSpikeTrialSyns['INH']['Soma'].append([])
                noSpikeTrialSyns['INH']['Total'].append([])
            for synType in activeSyns.keys():
                # Synapse type keys look like '<preCellType>_...'.
                preCellType = synType.split('_')[0]
                for excType in excTypes:
                    if excType == preCellType:
                        for syn in activeSyns[synType]:
                            # syn layout per reader: [1]=soma distance,
                            # [4]=structure label, [5]=activation times
                            # -- TODO confirm against scp reader.
                            somaDist = syn[1]
                            structure = syn[4]
                            synTimes = syn[5]
                            if somaDist < 500.0:  # proximal synapses only
                                synapseTimes[excType][structure][-1].extend(
                                    synTimes)
                                synapseTimes[excType]['Total'][-1].extend(
                                    synTimes)
                                synapseTimes['EXC'][structure][-1].extend(
                                    synTimes)
                                synapseTimes['EXC']['Total'][-1].extend(
                                    synTimes)
                                if trialWithSpikes[n][trialNr]:
                                    spikeTrialSyns[excType][structure][
                                        -1].extend(synTimes)
                                    spikeTrialSyns[excType]['Total'][
                                        -1].extend(synTimes)
                                    spikeTrialSyns['EXC'][structure][
                                        -1].extend(synTimes)
                                    spikeTrialSyns['EXC']['Total'][-1].extend(
                                        synTimes)
                                else:
                                    noSpikeTrialSyns[excType][structure][
                                        -1].extend(synTimes)
                                    noSpikeTrialSyns[excType]['Total'][
                                        -1].extend(synTimes)
                                    noSpikeTrialSyns['EXC'][structure][
                                        -1].extend(synTimes)
                                    noSpikeTrialSyns['EXC']['Total'][
                                        -1].extend(synTimes)
                for inhType in inhTypes:
                    if inhType == preCellType:
                        for syn in activeSyns[synType]:
                            somaDist = syn[1]
                            structure = syn[4]
                            synTimes = syn[5]
                            if somaDist < 500.0:
                                # Inhibitory types are only pooled into 'INH'.
                                synapseTimes['INH'][structure][-1].extend(
                                    synTimes)
                                synapseTimes['INH']['Total'][-1].extend(
                                    synTimes)
                                if trialWithSpikes[n][trialNr]:
                                    spikeTrialSyns['INH'][structure][
                                        -1].extend(synTimes)
                                    spikeTrialSyns['INH']['Total'][-1].extend(
                                        synTimes)
                                else:
                                    noSpikeTrialSyns['INH'][structure][
                                        -1].extend(synTimes)
                                    noSpikeTrialSyns['INH']['Total'][
                                        -1].extend(synTimes)
        print ''  # terminate the \r progress line
    tOffset = 100.0         # PSTH time origin (ms)
    tPlotBegin = 220.0      # plot window start (ms)
    tPlotBeginWindow = 50.0  # plot window length (ms)
    binWidth = 1.0          # PSTH bin width (ms)
    maxCount = 0  # NOTE(review): never used
    # Compute PSTHs for all trials, spike trials, and no-spike trials.
    synapseHistograms = {}
    spikeTrialHistograms = {}
    noSpikeTrialHistograms = {}
    for cellType in synapseTimes.keys():
        synapseHistograms[cellType] = {}
        spikeTrialHistograms[cellType] = {}
        noSpikeTrialHistograms[cellType] = {}
        for structure in synapseTimes[cellType].keys():
            synTimes = synapseTimes[cellType][structure]
            hist, bins = sca.PSTH_from_spike_times(
                synTimes, binWidth, tOffset, tPlotBegin + tPlotBeginWindow)
            synapseHistograms[cellType][structure] = hist, bins
            spikeTrialSynTimes = spikeTrialSyns[cellType][structure]
            hist2, bins2 = sca.PSTH_from_spike_times(
                spikeTrialSynTimes, binWidth, tOffset,
                tPlotBegin + tPlotBeginWindow)
            spikeTrialHistograms[cellType][structure] = hist2, bins2
            noSpikeTrialSynTimes = noSpikeTrialSyns[cellType][structure]
            hist3, bins3 = sca.PSTH_from_spike_times(
                noSpikeTrialSynTimes, binWidth, tOffset,
                tPlotBegin + tPlotBeginWindow)
            noSpikeTrialHistograms[cellType][structure] = hist3, bins3
    # Table 1: PSTHs pooled over all trials.
    tableOutName = outName + '_all_trials.csv'
    with open(tableOutName, 'w') as outputTable:
        hist, bins = synapseHistograms['EXC']['Total']
        header = 'Bin start\tbin end\tbin center'
        for cellType in plotTypes:
            header += '\t'
            header += cellType
        header += '\n'
        outputTable.write(header)
        for i in range(len(bins) - 1):
            line = str(bins[i])
            line += '\t'
            line += str(bins[i + 1])
            line += '\t'
            line += str(0.5 * (bins[i] + bins[i + 1]))
            for cellType in plotTypes:
                line += '\t'
                line += str(synapseHistograms[cellType]['Total'][0][i])
            line += '\n'
            outputTable.write(line)
    # Table 2: PSTHs from trials with an early spike.
    tableOutName2 = outName + '_spike_trials.csv'
    with open(tableOutName2, 'w') as outputTable:
        hist, bins = spikeTrialHistograms['EXC']['Total']
        header = 'Bin start\tbin end\tbin center'
        for cellType in plotTypes:
            header += '\t'
            header += cellType
        header += '\n'
        outputTable.write(header)
        for i in range(len(bins) - 1):
            line = str(bins[i])
            line += '\t'
            line += str(bins[i + 1])
            line += '\t'
            line += str(0.5 * (bins[i] + bins[i + 1]))
            for cellType in plotTypes:
                line += '\t'
                line += str(spikeTrialHistograms[cellType]['Total'][0][i])
            line += '\n'
            outputTable.write(line)
    # Table 3: PSTHs from trials without an early spike.
    tableOutName3 = outName + '_no_spike_trials.csv'
    with open(tableOutName3, 'w') as outputTable:
        hist, bins = noSpikeTrialHistograms['EXC']['Total']
        header = 'Bin start\tbin end\tbin center'
        for cellType in plotTypes:
            header += '\t'
            header += cellType
        header += '\n'
        outputTable.write(header)
        for i in range(len(bins) - 1):
            line = str(bins[i])
            line += '\t'
            line += str(bins[i + 1])
            line += '\t'
            line += str(0.5 * (bins[i] + bins[i + 1]))
            for cellType in plotTypes:
                line += '\t'
                line += str(noSpikeTrialHistograms[cellType]['Total'][0][i])
            line += '\n'
            outputTable.write(line)
def synaptic_input_PCA(vmNames, synData, outName):
    '''
    calculate PCA of temporal synaptic input patterns for all trials
    with/without spikes to identify "synaptic pattern dimension" with
    highest variance.

    Parameterization: Per trial, E/I syn. proximal/distal in 1ms bins
    (0-25ms) --> 4*25-dimensional space.

    vmNames: spike-time filenames (one per simulation location).
    synData: dict filename -> synapse activations, as produced by
        scp.read_complete_synapse_activation_file; synapse activation
        files are matched to vm files by directory and trial number.
    outName: prefix for the two output CSVs:
        outName + '_PC1_PC2_Load.csv' (PC1/PC2 loading vectors) and
        outName + '_PC1_PC2.csv' (per-trial projections + spike flag).
    '''
    excTypes = ('L2', 'L34', 'L4py', 'L4sp', 'L4ss', 'L5st', 'L5tt',
                'L6cc', 'L6ccinv', 'L6ct', 'VPM')
    inhTypes = ('L1', 'L23Trans', 'L45Sym', 'L45Peak', 'L56Trans',
                'SymLocal1', 'SymLocal2', 'SymLocal3', 'SymLocal4', 'SymLocal5', 'SymLocal6')
    # Generic excitatory synapse analysis
    plotTypes = ('EXC', 'INH')
    # Per-trial 25-bin histograms, separated by spike/no-spike trial and
    # proximal/distal synapse location.
    spikeTrialSynsEarlyProx = {}
    spikeTrialSynsEarlyDistal = {}
    noSpikeTrialSynsEarlyProx = {}
    noSpikeTrialSynsEarlyDistal = {}
    for cellType in plotTypes:
        spikeTrialSynsEarlyProx[cellType] = []
        spikeTrialSynsEarlyDistal[cellType] = []
        noSpikeTrialSynsEarlyProx[cellType] = []
        noSpikeTrialSynsEarlyDistal[cellType] = []
    tOffset = 245.0  # histogram time origin (ms)
    tStim = 253.0  # after VPM activation only
    tStimWindow = 50.0  # NOTE(review): never used
    earlySynWindow = 25.0  # synapse histogram window length (ms)
    earlyWindow = 17.0  # after VPM activation only: 25-8
    binWidth = 1.0  # NOTE(review): never used; bins come from range(26)
    # Classify each trial as spike/no-spike based on early spikes.
    trials = []
    trialSpikeTimes = [[] for j in range(len(vmNames))]
    trialWithSpikes = {}
    for n in range(len(vmNames)):
        fname = vmNames[n]
        print 'Loading spike times from file %s' % fname
        trialWithSpikes[n] = []
        trialSpikeTimes_ = scp.read_spike_times_file(fname)
        nrOfTrials = len(trialSpikeTimes_.keys())
        trials.append(nrOfTrials)
        for trial in trialSpikeTimes_.keys():
            trialSpikeTimes[n].append([])
            trialWithSpikes_ = False
            for tSpike in trialSpikeTimes_[trial]:
                if tSpike >= tStim and tSpike < tStim + earlyWindow:
                    trialSpikeTimes[n][-1].append(tSpike - tStim)
                    trialWithSpikes_ = True
            trialWithSpikes[n].append(trialWithSpikes_)
    synNames = synData.keys()
    for n in range(len(vmNames)):
        nrSpikeTrials = 0
        nrNoSpikeTrials = 0
        for trialNr in range(trials[n]):
            print 'Counting active synapses in trial %d of %d\r' % (
                trialNr + 1, trials[n]),
            sys.stdout.flush()
            # Match the synapse activation file of this trial in the same
            # directory as the current vm file.
            synTrialStr = 'simulation_run%04d_synapses.csv' % trialNr
            synTrialFile = ''
            tmpVmName = vmNames[n]
            for name in synNames:
                if synTrialStr in name and os.path.split(
                        tmpVmName)[0] == os.path.split(name)[0]:
                    synTrialFile = name
                    break
            if synTrialFile == '':
                errstr = 'Could not find synapse activation file for trial nr. %d' % trialNr
                raise RuntimeError(errstr)
            activeSyns = synData[synTrialFile]
            # Fresh per-trial containers; only the pooled EXC/INH 'Total'
            # lists are actually filled below.
            synapseTimes = {}
            synapseTimesProx = {}
            synapseTimesDistal = {}
            for excType in excTypes:
                synapseTimes[excType] = {
                    'ApicalDendrite': [],
                    'Dendrite': [],
                    'Total': []
                }
                synapseTimesProx[excType] = {
                    'ApicalDendrite': [],
                    'Dendrite': [],
                    'Total': []
                }
                synapseTimesDistal[excType] = {
                    'ApicalDendrite': [],
                    'Dendrite': [],
                    'Total': []
                }
            synapseTimes['EXC'] = {
                'ApicalDendrite': [],
                'Dendrite': [],
                'Total': []
            }
            synapseTimesProx['EXC'] = {
                'ApicalDendrite': [],
                'Dendrite': [],
                'Total': []
            }
            synapseTimesDistal['EXC'] = {
                'ApicalDendrite': [],
                'Dendrite': [],
                'Total': []
            }
            synapseTimes['INH'] = {
                'ApicalDendrite': [],
                'Dendrite': [],
                'Soma': [],
                'Total': []
            }
            synapseTimesProx['INH'] = {
                'ApicalDendrite': [],
                'Dendrite': [],
                'Soma': [],
                'Total': []
            }
            synapseTimesDistal['INH'] = {
                'ApicalDendrite': [],
                'Dendrite': [],
                'Soma': [],
                'Total': []
            }
            for synType in activeSyns.keys():
                # Synapse type keys look like '<preCellType>_...'.
                preCellType = synType.split('_')[0]
                for excType in excTypes:
                    if excType == preCellType:
                        for syn in activeSyns[synType]:
                            # syn layout per reader: [1]=soma distance,
                            # [4]=structure, [5]=activation times
                            # -- TODO confirm against scp reader.
                            somaDist = syn[1]
                            structure = syn[4]
                            synTimes = syn[5]
                            if somaDist < 500.0:  # proximal/distal split at 500 um
                                synapseTimesProx['EXC']['Total'].extend(
                                    synTimes)
                            else:
                                synapseTimesDistal['EXC']['Total'].extend(
                                    synTimes)
                for inhType in inhTypes:
                    if inhType == preCellType:
                        for syn in activeSyns[synType]:
                            somaDist = syn[1]
                            structure = syn[4]
                            synTimes = syn[5]
                            if somaDist < 500.0:
                                synapseTimesProx['INH']['Total'].extend(
                                    synTimes)
                            else:
                                synapseTimesDistal['INH']['Total'].extend(
                                    synTimes)
            if not trialWithSpikes[n][trialNr]:
                nrNoSpikeTrials += 1
                for cellType in plotTypes:
                    synTimesProx = synapseTimesProx[cellType]['Total']
                    synTimesDistal = synapseTimesDistal[cellType]['Total']
                    # Keep only synapse times inside the early window,
                    # shifted to start at 0, then bin into 25 x 1ms bins.
                    tSynProxTmpList = []
                    tSynDistalTmpList = []
                    for tSyn in synTimesProx:
                        if tOffset <= tSyn < tOffset + earlySynWindow:
                            tSynProxTmpList.append(tSyn - tOffset)
                    for tSyn in synTimesDistal:
                        if tOffset <= tSyn < tOffset + earlySynWindow:
                            tSynDistalTmpList.append(tSyn - tOffset)
                    proxHist, tmpBins = np.histogram(tSynProxTmpList,
                                                     bins=range(26))
                    distalHist, tmpBins = np.histogram(tSynDistalTmpList,
                                                       bins=range(26))
                    noSpikeTrialSynsEarlyProx[cellType].append(proxHist)
                    noSpikeTrialSynsEarlyDistal[cellType].append(distalHist)
            elif trialWithSpikes[n][trialNr]:
                nrSpikeTrials += 1
                # NOTE(review): computed but never used afterwards.
                tSpikeReference = np.min(trialSpikeTimes[n][trialNr])
                for cellType in plotTypes:
                    synTimesProx = synapseTimesProx[cellType]['Total']
                    synTimesDistal = synapseTimesDistal[cellType]['Total']
                    tSynProxTmpList = []
                    tSynDistalTmpList = []
                    for tSyn in synTimesProx:
                        if tOffset <= tSyn < tOffset + earlySynWindow:
                            tSynProxTmpList.append(tSyn - tOffset)
                    for tSyn in synTimesDistal:
                        if tOffset <= tSyn < tOffset + earlySynWindow:
                            tSynDistalTmpList.append(tSyn - tOffset)
                    proxHist, tmpBins = np.histogram(tSynProxTmpList,
                                                     bins=range(26))
                    distalHist, tmpBins = np.histogram(tSynDistalTmpList,
                                                       bins=range(26))
                    spikeTrialSynsEarlyProx[cellType].append(proxHist)
                    spikeTrialSynsEarlyDistal[cellType].append(distalHist)
        print ''  # terminate the \r progress line
        print 'Nr of trials with spike: %d' % nrSpikeTrials
        print 'Nr of trials without spike: %d' % nrNoSpikeTrials
        print 'mean spike time = %.1fms' % np.mean(
            [tSpike for trace in trialSpikeTimes[n] for tSpike in trace])
    # Build the data matrix: one row per trial (spike trials first),
    # each row = 100-dim feature vector.
    trialSpikeList = []
    totalHistList = []
    totalSpikeTrials = len(spikeTrialSynsEarlyProx['EXC'])
    totalNoSpikeTrials = len(noSpikeTrialSynsEarlyProx['EXC'])
    # concatenate E/I/prox/distal histograms in the order:
    # prox. E/prox. I/distal E/distal I
    for i in range(totalSpikeTrials):
        trialSpikeList.append(1)
        trialHist = []
        trialHist.extend(spikeTrialSynsEarlyProx['EXC'][i])
        trialHist.extend(spikeTrialSynsEarlyProx['INH'][i])
        trialHist.extend(spikeTrialSynsEarlyDistal['EXC'][i])
        trialHist.extend(spikeTrialSynsEarlyDistal['INH'][i])
        totalHistList.append(trialHist)
    for i in range(totalNoSpikeTrials):
        trialSpikeList.append(0)
        trialHist = []
        trialHist.extend(noSpikeTrialSynsEarlyProx['EXC'][i])
        trialHist.extend(noSpikeTrialSynsEarlyProx['INH'][i])
        trialHist.extend(noSpikeTrialSynsEarlyDistal['EXC'][i])
        trialHist.extend(noSpikeTrialSynsEarlyDistal['INH'][i])
        totalHistList.append(trialHist)
    # PCA via SVD of the (mean-centered) data matrix.
    # NOTE(review): centering subtracts the grand mean scalar, not the
    # per-feature column mean -- confirm this is intended.
    allData = np.array(totalHistList)
    dataMean = np.mean(allData)
    allData = allData - dataMean
    eigenvectors, eigenvals, V = np.linalg.svd(allData.T, full_matrices=False)
    projectedData = np.dot(allData, eigenvectors).transpose()
    # Split PC1/PC2 loading vectors back into the four 25-bin segments.
    PC1LoadVec = eigenvectors.transpose()[0]
    proxELoad = PC1LoadVec[:25]
    proxILoad = PC1LoadVec[25:50]
    distalELoad = PC1LoadVec[50:75]
    distalILoad = PC1LoadVec[75:]
    PC2LoadVec = eigenvectors.transpose()[1]
    proxELoad2 = PC2LoadVec[:25]
    proxILoad2 = PC2LoadVec[25:50]
    distalELoad2 = PC2LoadVec[50:75]
    distalILoad2 = PC2LoadVec[75:]
    with open(outName + '_PC1_PC2_Load.csv', 'w') as outFile1:
        header = 'time (ms)\tPC1 E prox load\tPC1 I prox load\tPC1 E distal load\tPC1 I distal load\t'
        header += 'PC2 E prox load\tPC2 I prox load\tPC2 E distal load\tPC2 I distal load\n'
        outFile1.write(header)
        for i in range(25):
            line = str(i + 0.5)  # bin center (ms)
            line += '\t'
            line += str(proxELoad[i])
            line += '\t'
            line += str(proxILoad[i])
            line += '\t'
            line += str(distalELoad[i])
            line += '\t'
            line += str(distalILoad[i])
            line += '\t'
            line += str(proxELoad2[i])
            line += '\t'
            line += str(proxILoad2[i])
            line += '\t'
            line += str(distalELoad2[i])
            line += '\t'
            line += str(distalILoad2[i])
            line += '\n'
            outFile1.write(line)
    with open(outName + '_PC1_PC2.csv', 'w') as outFile2:
        header = 'spike trial 1/0\tPC1\tPC2\n'
        outFile2.write(header)
        for i in range(len(projectedData[0])):
            line = str(trialSpikeList[i])
            line += '\t'
            line += str(projectedData[0][i])
            line += '\t'
            line += str(projectedData[1][i])
            line += '\n'
            outFile2.write(line)
def iso_probability_plot(folder, suffix, whisker, outName):
    '''Contour plot of spike probability over (synapse timing, synapse number).

    Scans `folder` for spike-time files whose path encodes the condition
    (third-to-last path component: underscore-separated, field 1 = synapse
    number, field 5 = median synapse timing), computes per-condition spike
    probability in a whisker-specific response window, and writes
    iso-probability contours to outName + '_isocontours.pdf'.

    whisker: 'PW', 'SW' or 'E2'; selects the response window length and
        the y-tick scaling (fraction of synapses <500 um).
    Raises ValueError for any other whisker value.
    '''
    fnames = []
    scan_directory(folder, fnames, suffix)
    # Condition encoded in the path -- TODO confirm layout against the
    # script that generates these directories.
    synNrTimingFilenames = {}
    for fname in fnames:
        basePath = fname.split('/')[-3]
        nSyn = int(basePath.split('_')[1])
        synTiming = float(basePath.split('_')[5])
        if nSyn not in synNrTimingFilenames:  # was dict.has_key (Py2-only)
            synNrTimingFilenames[nSyn] = {}
        synNrTimingFilenames[nSyn][synTiming] = fname
    tBegin = 8.0  # response window start relative to offset (ms)
    if whisker == 'PW':
        window = 7.0
    elif whisker == 'SW':
        window = 9.0
    elif whisker == 'E2':
        window = 17.0
    else:
        # Previously an unknown whisker fell through silently and crashed
        # later with a NameError on `window`.
        raise ValueError('Unknown whisker %r (expected PW, SW or E2)' % whisker)
    synNrTimingProbs = {}
    offset = 245.0  # stimulus onset (ms)
    for nSyn in synNrTimingFilenames.keys():
        synNrTimingProbs[nSyn] = {}
        for synTiming in synNrTimingFilenames[nSyn].keys():
            fname = synNrTimingFilenames[nSyn][synTiming]
            print('Analyzing spike times in file %s' % fname)
            trialSpikeTimes = scp.read_spike_times_file(fname)
            nrOfTrials = len(trialSpikeTimes.keys())
            spikeTrials = 0.0
            for trial in trialSpikeTimes.keys():
                for tSpike in trialSpikeTimes[trial]:
                    if tBegin <= tSpike - offset < tBegin + window:
                        spikeTrials += 1
                        break  # count each trial at most once
            spikeProb = spikeTrials / nrOfTrials
            synNrTimingProbs[nSyn][synTiming] = spikeProb
            print('Window: %.1f - %.1f ms: spike prob = %.2f' %
                  (tBegin, tBegin + window, spikeProb))
    numbers = np.array(range(25, 350, 25), dtype=np.float64)
    timings = np.array(range(2, 25, 1), dtype=np.float64)
    # Mesh axes: x = timing, y = synapse number.
    numberTimingMesh = np.meshgrid(timings, numbers)
    spikeProbMesh = np.zeros_like(numberTimingMesh[0])
    for i in range(len(spikeProbMesh) - 1):
        for j in range(len(spikeProbMesh[i]) - 1):
            timing = numberTimingMesh[0][i][j]
            number = int(numberTimingMesh[1][i][j])
            try:
                spikeProbMesh[i][j] = synNrTimingProbs[number][timing]
            except KeyError:
                spikeProbMesh[i][j] = 0.0  # condition not simulated
    plt.figure(1)
    if whisker == 'PW':
        isocontours = [0.02, 0.04, 0.08, 0.15, 0.18, 0.3, 0.6, 0.9]
    elif whisker == 'SW' or whisker == 'E2':
        isocontours = [0.01, 0.02, 0.04, 0.08, 0.15, 0.3, 0.6, 0.9]
    CS = plt.contour(numberTimingMesh[0], numberTimingMesh[1], spikeProbMesh,
                     isocontours)
    plt.clabel(CS, inline=1, fontsize=10, inline_spacing=-15)
    titleStr = 'Reduced SPD model - iso-probability contours for %s touch' % whisker
    plt.title(titleStr)
    plt.xlabel('Synapse timing (median; ms)')
    plt.ylabel('Synapse number')
    plt.xlim(2.0, 24.0)
    plt.ylim(0, 325)
    plt.xticks(np.arange(2, 24, 1.0), [str(i) for i in range(2, 24, 1)])
    # Rescale y-ticks to the proximal (<500 um) synapse fraction.
    if whisker == 'PW':
        plt.yticks(np.arange(25, 325, 25.0),
                   [str(0.76 * i) for i in range(25, 325, 25)])  # PW: 76% syn <500 microns
    elif whisker == 'SW':
        plt.yticks(np.arange(25, 325, 25.0),
                   [str(0.69 * i) for i in range(25, 325, 25)])  # SW: 69% syn <500 microns
    elif whisker == 'E2':
        plt.yticks(np.arange(25, 325, 25.0),
                   [str(0.68 * i) for i in range(25, 325, 25)])  # E2: 68% syn <500 microns
    cbar = plt.colorbar()
    cbar.set_label('Spike prob')
    plt.savefig(outName + '_isocontours.pdf')
def PSTH_timing_number_analysis(folder, suffix, tBegin, window, outName):
    '''Iso-probability contour plot of spike probability over synapse
    number/timing, overlaid with E2 in vivo reference points.

    NOTE(review): this definition is shadowed by a later function of the
    same name in this module and is therefore unreachable through the
    module namespace -- rename one of them if both variants are needed.

    Scans `folder` for spike-time files whose path encodes the condition
    (second path component: underscore-separated, field 1 = synapse number,
    field 5 = median synapse timing), computes spike probability in
    [tBegin, tBegin + window) after the stimulus, and writes the contour
    plot to outName + '_isocontours.pdf'.
    '''
    fnames = []
    scan_directory(folder, fnames, suffix)
    synNrTimingFilenames = {}
    for fname in fnames:
        basePath = fname.split('/')[1]
        nSyn = int(basePath.split('_')[1])
        synTiming = float(basePath.split('_')[5])
        if nSyn not in synNrTimingFilenames:  # was dict.has_key (Py2-only)
            synNrTimingFilenames[nSyn] = {}
        synNrTimingFilenames[nSyn][synTiming] = fname
    synNrTimingProbs = {}
    offset = 245.0  # stimulus onset (ms)
    for nSyn in synNrTimingFilenames.keys():
        synNrTimingProbs[nSyn] = {}
        for synTiming in synNrTimingFilenames[nSyn].keys():
            fname = synNrTimingFilenames[nSyn][synTiming]
            print('Analyzing spike times in file %s' % fname)
            trialSpikeTimes = scp.read_spike_times_file(fname)
            nrOfTrials = len(trialSpikeTimes.keys())
            spikeTrials = 0.0
            for trial in trialSpikeTimes.keys():
                for tSpike in trialSpikeTimes[trial]:
                    if tBegin <= tSpike - offset < tBegin + window:
                        spikeTrials += 1
                        break  # count each trial at most once
            spikeProb = spikeTrials / nrOfTrials
            synNrTimingProbs[nSyn][synTiming] = spikeProb
            print('Window: %.1f - %.1f ms: spike prob = %.2f' %
                  (tBegin, tBegin + window, spikeProb))
    numbers = np.array(range(25, 350, 25), dtype=np.float64)
    timings = np.array(range(2, 25, 1), dtype=np.float64)
    # Mesh axes: x = synapse number, y = timing.
    numberTimingMesh = np.meshgrid(numbers, timings)
    spikeProbMesh = np.zeros_like(numberTimingMesh[0])
    for i in range(len(spikeProbMesh) - 1):
        for j in range(len(spikeProbMesh[i]) - 1):
            number = int(numberTimingMesh[0][i][j])
            timing = numberTimingMesh[1][i][j]
            try:
                spikeProbMesh[i][j] = synNrTimingProbs[number][timing]
            except KeyError:
                spikeProbMesh[i][j] = 0.0  # condition not simulated
    # In vivo reference (E2 whisker): L3/4, L5tt, L6cc, VPM.
    # (PW variant: numberMeans = 1/0.76*[74.4, 89.9, 62.2, 54.8, 54.6, 109.4];
    #  SuW variant: numberMeans = 1/0.69*[24.4, 23.0, 40.0, 37.7, 6.1].)
    numberMeans = 1 / 0.68 * np.array([2.2, 9.0, 19.1, 0.1])
    numberSTD = 1 / 0.68 * np.array([9.6, 11.8, 10.2, 1.4])
    timingMeans = np.array([21.6, 20.8, 14.1, 18.3])
    timingSTD = np.array([5.3, 5.1, 7.0, 11.7])
    plt.figure(1)
    plt.errorbar(numberMeans,
                 timingMeans,
                 xerr=numberSTD,
                 yerr=timingSTD,
                 fmt='ro')
    # SuW/E2 contour levels (PW used [0.02, 0.04, 0.08, 0.15, 0.18, 0.3, 0.6, 0.9]).
    isocontours = [0.01, 0.02, 0.04, 0.08, 0.15, 0.3, 0.6, 0.9]
    CS = plt.contour(numberTimingMesh[0], numberTimingMesh[1], spikeProbMesh,
                     isocontours)
    plt.clabel(CS, inline=1, fontsize=10, inline_spacing=-15)
    plt.title('Simplest default with labels')
    plt.xlabel('Synapse number')
    plt.ylabel('Synapse timing (median; ms)')
    plt.xlim(0, 325)
    plt.ylim(2.0, 24.0)
    plt.xticks(np.arange(25, 325, 25.0),
               [str(0.68 * i) for i in range(25, 325, 25)])  # SuW: 68% syn <500 microns
    plt.yticks(np.arange(2, 24, 1.0), [str(i) for i in range(2, 24, 1)])
    cbar = plt.colorbar()
    cbar.set_label('Spike prob')
    plt.savefig(outName + '_isocontours.pdf')
def PSTH_timing_number_analysis(folder, suffix, tBegin, window, outName):
    '''Heatmap of spike probability over synapse number/timing.

    NOTE(review): redefines (shadows) the earlier function of the same name
    in this module -- rename one of them if both variants are needed.

    Scans `folder` for spike-time files whose path encodes the condition
    (second path component: underscore-separated, field 1 = synapse number,
    field 5 = median synapse timing), computes spike probability in
    [tBegin, tBegin + window) after the stimulus, writes the probability
    grid to outName + '_grid_probs.csv' and a heatmap to
    outName + '_heatmap.pdf'.
    '''
    fnames = []
    scan_directory(folder, fnames, suffix)
    synNrTimingFilenames = {}
    for fname in fnames:
        basePath = fname.split('/')[1]
        nSyn = int(basePath.split('_')[1])
        synTiming = float(basePath.split('_')[5])
        if nSyn not in synNrTimingFilenames:  # was dict.has_key (Py2-only)
            synNrTimingFilenames[nSyn] = {}
        synNrTimingFilenames[nSyn][synTiming] = fname
    synNrTimingProbs = {}
    offset = 245.0  # stimulus onset (ms)
    for nSyn in synNrTimingFilenames.keys():
        synNrTimingProbs[nSyn] = {}
        for synTiming in synNrTimingFilenames[nSyn].keys():
            fname = synNrTimingFilenames[nSyn][synTiming]
            print('Analyzing spike times in file %s' % fname)
            trialSpikeTimes = scp.read_spike_times_file(fname)
            nrOfTrials = len(trialSpikeTimes.keys())
            spikeTrials = 0.0
            for trial in trialSpikeTimes.keys():
                for tSpike in trialSpikeTimes[trial]:
                    if tBegin <= tSpike - offset < tBegin + window:
                        spikeTrials += 1
                        break  # count each trial at most once
            spikeProb = spikeTrials / nrOfTrials
            synNrTimingProbs[nSyn][synTiming] = spikeProb
            print('Window: %.1f - %.1f ms: spike prob = %.2f' %
                  (tBegin, tBegin + window, spikeProb))
    numbers = np.array(range(50, 400, 25), dtype=np.float64)
    timings = np.array(range(2, 11, 1), dtype=np.float64)
    numberTimingMesh = np.meshgrid(numbers, timings)
    spikeProbMesh = np.zeros_like(numberTimingMesh[0])
    for i in range(len(spikeProbMesh) - 1):
        for j in range(len(spikeProbMesh[i]) - 1):
            number = int(numberTimingMesh[0][i][j])
            timing = numberTimingMesh[1][i][j]
            # Default missing grid points to 0 instead of crashing with
            # KeyError -- consistent with the sibling contour analyses.
            try:
                spikeProbMesh[i][j] = synNrTimingProbs[number][timing]
            except KeyError:
                spikeProbMesh[i][j] = 0.0
    with open(outName + '_grid_probs.csv', 'w') as outFile:
        # sorted() instead of Py2-only keys()+sort() (keys() is a view in Py3).
        synNumbers = sorted(synNrTimingProbs.keys())
        synTimings = sorted(synNrTimingProbs[synNumbers[0]].keys())
        header = '#syn nr.\\timing'
        for synTiming in synTimings:
            header += '\t'
            header += '%.1f' % synTiming
        header += '\n'
        outFile.write(header)
        for synNr in synNumbers:
            line = str(synNr)
            for synTiming in synTimings:
                line += '\t'
                # -1.0 marks conditions missing for this synapse number
                # (previously an unguarded lookup that raised KeyError).
                try:
                    line += str(synNrTimingProbs[synNr][synTiming])
                except KeyError:
                    line += '-1.0'
            line += '\n'
            outFile.write(line)
    plt.figure(1)
    plt.pcolormesh(numberTimingMesh[0],
                   numberTimingMesh[1],
                   spikeProbMesh,
                   cmap='hot')
    plt.xlabel('Synapse number')
    plt.ylabel('Synapse timing (median; ms)')
    plt.xlim(50, 350)
    plt.ylim(2.0, 10.0)
    # Ticks placed at cell centers; labels rescaled to the proximal fraction.
    plt.xticks(np.arange(62.5, 362.5, 25.0),
               [str(0.76 * i) for i in range(50, 350, 25)])  # PW: 76% syn <500 microns
    plt.yticks(np.arange(2.5, 10.5, 1.0), [str(i) for i in range(2, 10, 1)])
    cbar = plt.colorbar()
    cbar.set_label('Spike prob')
    plt.savefig(outName + '_heatmap.pdf')