Code example #1
File: addPSCs.py Project: ineuron/NeuroDAQ-Analysis
# Lag traces and add them
# (console fragment: `data` is the selected trace and `dt` its sampling
# interval in ms, both defined in the NeuroDAQ console session)
import numpy as np
import matplotlib.pylab as plt
from console import utils as ndaq

number = 20  # number of lagged copies
freq = 20.   # train frequency in Hz
isi = 1/freq*1000  # inter-stimulus interval in ms

result = []
for n in range(number):
    trace = data
    npad = int(n*isi/dt)
    laggedTrace = np.pad(trace, (npad, 0), 'constant', constant_values=(0,0)) 
    #if n>0: laggedTrace = laggedTrace[:-npad]
    result.append(laggedTrace)

maxshape = result[-1].shape  # the last trace is the longest (most padding)
alignedTraces = []
c = 0
for trace in result:
    size = len(trace)
    # np.resize pads by repeating the data, so zero out the repeated tail
    trace = np.resize(trace, maxshape)
    trace[size:] = 0
    alignedTraces.append(trace)
    #if c==1: plt.plot(trace)
    c = c + 1

summedTrace = np.array(alignedTraces).sum(axis=0)
plt.plot(summedTrace)
plt.show()

ndaq.store_data(summedTrace, attrs={'dt':1})
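
The same lag-and-sum can be written without the resize/zero-tail step by preallocating the output array. A minimal sketch under the same assumptions (`data`, `dt`, `isi` and `number` as above):

total = len(data) + int((number - 1) * isi / dt)
summed = np.zeros(total)
for n in range(number):
    npad = int(n * isi / dt)
    summed[npad:npad + len(data)] += data  # add each lagged copy in place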

Code example #2
# Get onsets after manually sorting cut events
# 
# Select item with events to start

import numpy as np
import matplotlib.pylab as plt
from console import utils as ndaq

# Get data (`browser` is the NeuroDAQ main window, available in the console)
item = browser.ui.workingDataTree.selectedItems()[0]

# Iterate events and get onsets
onsets = []
for c in range(item.childCount()):
    onsets.append(item.child(c).attrs['onset'])

# Save
ndaq.store_data(np.array(onsets), name='xOnsets')
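
All of these snippets follow the same console pattern; the signatures below are inferred from their usage on this page rather than from any documented API, so treat them as assumptions:

import numpy as np
from console import utils as ndaq

data = ndaq.get_data()        # traces currently plotted in the data window
items = ndaq.get_items()      # the selected tree items; metadata sits in item.attrs
dt = items[0].attrs['dt']     # sampling interval (ms) stored as an attribute
# ... analysis ...
ndaq.store_data(np.array(data), name='result', attrs={'dt': dt})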

Code example #3
File: ap_thr.py Project: ineuron/NeuroDAQ-Analysis
# Get AP thresholds using a derivative threshold

import numpy as np
import matplotlib.pylab as plt
from analysis import smooth
from console import utils as ndaq

# Get data
data = ndaq.get_data()
dt = browser.ui.dataPlotsWidget.plotDataItems[0].attrs['dt']

# Get thresholds
cutoff = 10.0  # mV/ms
ths = []
for d in data:
    diff = np.diff(d)/dt  # derivative in mV/ms
    i = diff > cutoff
    l = np.arange(len(i))
    ths.append(d[l[i][0]])  # Vm at the first suprathreshold point

# Store data
m = np.mean(ths)
print(m)
ndaq.store_data(np.array(ths), name='ths')
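
A guarded variant for traces that never cross the cutoff (otherwise indexing the first crossing raises IndexError); np.flatnonzero is simply a compact way to get the crossing indices:

for d in data:
    crossings = np.flatnonzero(np.diff(d)/dt > cutoff)
    if len(crossings) > 0:
        ths.append(d[crossings[0]])
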
Code example #4
import numpy as np
import matplotlib.pylab as plt
from console import utils as ndaq

# Get data
data = ndaq.get_data()

# Subtract average of all channels from each channel
# baseline traces first
avg = data.mean(axis=0)
refData = []
for n in range(len(data)):
    refData.append(data[n, :] - avg)

# Store data
ndaq.store_data(refData, name='refData', attrs={'dt': 1./30})
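
Since `data` is indexed as a 2D array here, the loop is equivalent to a single broadcast subtraction:

refData = data - data.mean(axis=0)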


Code example #5
start = 0   # onset - pre
end = 3800  # onset + post

# Cut tdms (`tdms` is presumably an nptdms TdmsFile opened earlier)
pos = tdms.object('Real-time Coordinates', 'X-Vertical').data[start:end] * 10
spot = tdms.object('Visual  Stimulation', 'Spot Diameter').data[start:end] * 10

# Cut probe data (`frameIndex`, `dataStart` and `channels` come from earlier
# in the session)
#print frameIndex[onset]
data = []
print(frameIndex[end], dataStart)
pstart = frameIndex[start] + dataStart
pend = frameIndex[end] + dataStart
c = 0
for chn in channels:
    print(chn)
    trace = channels[chn][pstart:pend]
    data.append(trace - np.mean(trace) + c)  # offset each channel for a stacked plot
    c -= 1000

###########
# Plot data
###########

############
# Store data
############
ndaq.store_data(pos, name='position', attrs={'dt': 1. / 50})
ndaq.store_data(spot, name='spot', attrs={'dt': 1. / 50})
ndaq.store_data(data, name='probe data', attrs={'dt': 1. / 30000})
Code example #6
File: test.py Project: ineuron/NeuroDAQ-Analysis
    # Get decay part only (fragment: runs inside a loop over traces, with
    # xpeak, scaledAvg, nbins, results1 and results2 defined earlier)
    decay = trace[xpeak+25:]
    avgDecay = scaledAvg[xpeak+25:]
    plt.plot(scaledAvg, 'r')
    plt.plot(trace, 'k')
    plt.plot((decay-avgDecay)**2)

    # Divide into bins (binSize is negative for inward currents, so the bins
    # run from 0 down to the peak)
    binSize = decay.min()/nbins

    # Get mean and variance current per bin
    m, v = [], []
    for b in range(nbins):
        ymin = b*binSize
        ymax = (b+1)*binSize
        i = (decay<ymin) & (decay>ymax)  # For inward currents
        current = decay[i].mean()  # NaN if the bin caught no samples
        var = (np.sum((decay[i]-avgDecay[i])**2))/(np.sum(i)-1)
        
        m.append(current)
        v.append(var)
    
    results1.append(np.array(m))
    results2.append(np.array(v)) 
    ndaq.plot_data(np.array(m), np.array(v))

plt.show()
# Store
ndaq.store_data(results1)
ndaq.store_data(results2)
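
The mean-variance pairs computed above are the ingredients of non-stationary fluctuation analysis, where variance relates to mean current as var = i*I - I**2/N (i: unitary current, N: channel count). A minimal fitting sketch, assuming scipy is available; the fit itself is not part of the original script:

from scipy.optimize import curve_fit

def parabola(I, i, N):
    return i*I - I**2/N

mean_all = np.concatenate(results1)
var_all = np.concatenate(results2)
ok = np.isfinite(mean_all) & np.isfinite(var_all)  # drop empty-bin NaNs
(i_unit, n_chan), _ = curve_fit(parabola, mean_all[ok], var_all[ok])
print(i_unit, n_chan)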

Code example #7
if len(data) > 1000: data = [data]  # hack for when there is only one trace
items = ndaq.get_items()
dt = items[0].attrs['dt']

# Detect events and get measures (fragment: `detect`, `data`, `vm`, `AP_time`
# and `AP_winCut` are defined earlier in the session)
ap = int(AP_time/dt - AP_winCut/dt)
numbers, times, eventOnsets = [], [], []
for d in data:
    # Detect
    onset, vpoints = detect(d)
    #print onset
    # Select
    #i = vpoints[0:ap]<vm
    i = d[0:ap] < vm
    if np.sum(i) > 0:
        l = np.arange(0, len(i))
        xpoint = l[i][-1]  # last point below vm before the AP window
        a = onset[onset > xpoint]
        events = a[a < ap]
        # Measure
        if len(events) > 0:
            numbers.append(len(events))
            times.append(AP_time - events[-1]*dt)
            eventOnsets.append(AP_time - events*dt)

# Save data
print(np.mean(numbers), np.mean(times))
ndaq.store_data(np.array(numbers), name='numbers')
ndaq.store_data(np.array(times), name='last_times')
ndaq.store_data(list(eventOnsets), name='onsets')

Code example #8
# Get data
item = browser.ui.workingDataTree.selectedItems()[0]
print(item.text(0))
for c in range(item.childCount()):
    if 'trace' in item.child(c).text(0): trace = item.child(c)
    if 'xOnsets' in item.child(c).text(0): xonsets = item.child(c).data

# xOnsets is in datapoints, convert to ms
dt = trace.attrs['dt']
xonsets = xonsets * dt

# Convert to frequency
freq = 1000./np.diff(xonsets)

# Make histogram
nbins = 100
binsRange = (0, 20)
# 'normed' was removed in matplotlib 3.x; density=False with the manual
# normalisation below gives the same result
n, bins, patches = plt.hist(freq, bins=nbins, range=binsRange,
                            density=False, histtype='stepfilled')
n = n/float(np.sum(n))

# Store data
ndaq.store_data(n, name='n')
ndaq.store_data(bins, name='bins')
ndaq.store_data(np.array(freq), name='median_freq')

# AP nbins = 50, binsRange = 10
# EPSC nbins = 0, binsRange = 10
Code example #9
import numpy as np
import matplotlib.pylab as plt
import scipy.signal as signal
from analysis import smooth
from console import utils as ndaq

# Get data and items
data = ndaq.get_data()
items = ndaq.get_items()
dt = items[0].attrs['dt']
dt = .04  # manual override of the stored sampling interval

# Get peaks, decays and peak times
decayTimes = []
for trace in data:
    peak = trace.min()
    decay = peak * 0.37  # 1/e level for an inward (negative) peak
    peakTime = trace.argmin()
    decayTrace = trace[peakTime:]
    idx = decayTrace < decay  # samples still below the 1/e level
    decayTime = np.sum(idx) * dt
    #plt.plot(decayTrace[idx])
    print(decayTime)
    decayTimes.append(decayTime)
#plt.show()

ndaq.store_data(decayTimes, name='decay_times')
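
Counting samples below the 1/e level only equals tau for a clean monoexponential decay back to a 0 baseline; a fit is more robust to noise. A minimal sketch (scipy and the fit are assumptions, applied here to the last trace's decayTrace):

from scipy.optimize import curve_fit

def expdecay(t, a, tau):
    return a * np.exp(-t/tau)

t = np.arange(len(decayTrace)) * dt
(a, tau), _ = curve_fit(expdecay, t, decayTrace, p0=(decayTrace[0], max(decayTime, dt)))
print(tau)
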
Code example #10
File: decays.py Project: ineuron/NeuroDAQ-Analysis
# (fragment: `items` presumably comes from ndaq.get_items(); plotWidget and
# pg are the console's pyqtgraph plot handle and module)
c1, c2 = ndaq.get_cursors()
dt = items[0].attrs['dt']

# Setup exponential decay curve
xdecay = c2
xpeak = c1
tau = 45
x = np.arange(0, c2*dt-c1*dt, dt)

# Go through plotted traces
normDecays = []
for item in items:
    data = item.data
    ydecay = data[xdecay]
    ypeak = data[xpeak]
    if ydecay > ypeak: ydecay = ypeak*0.99  # for the cases it does not decay
    # Get values from theoretical curve
    y = ypeak * np.exp(-(x/tau))  # don't need this, just for plotting
    tDecay = -tau * np.log(ydecay/ypeak)
    # Normalise
    #print ydecay, (xdecay-xpeak)*dt, tDecay
    val = (xdecay-xpeak)*dt / tDecay
    if not np.isnan(val): normDecays.append(val)

# Plot decay curve (uses y from the last trace in the loop)
plotWidget.plot(x + c1*dt, y, pen=pg.mkPen('r', width=1))

# Save data
ndaq.store_data(np.array(normDecays), name='decays')
print(np.mean(normDecays), normDecays)
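
Substituting tDecay back in shows what is stored: val = (xdecay-xpeak)*dt / tDecay is the ratio tau_eff/tau, where the per-trace effective time constant is:

tau_eff = -(xdecay - xpeak) * dt / np.log(ydecay / ypeak)
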
Code example #11
File: lfp.py Project: ineuron/NeuroDAQ-Analysis
# (fragment: f_start and probe are defined earlier in lfp.py)
f_stop = 150
deltafreq = (f_stop - f_start) / 60.
#probe.get_timeFreq(probe.dataWin, f_start, f_stop, deltafreq)

# Make average spectrogram
#tfrData = np.array([s.map for s in probe.tfrData])
#tfrMean = tfrData.mean(axis=0)

# Plot analysis
#for ax in canvas.fig.axes:
#    canvas.fig.delaxes(ax)
#nPlots = len(probe.tfrData)+2
#gs = gridspec.GridSpec(nPlots, 1)

#probe.tfrData[0].map = tfrMean # hack the tfr plotting method
#for plot in range(nPlots):
#    ax = canvas.fig.add_subplot(gs[plot])
#    if plot==0:
#        ax.plot(probe.spot)
#    elif plot==1:
#        ax.plot(probe.pos)
#    else:
#        probe.tfrData[plot-2].plot(ax, colorbar=False, clim=[0,80])
#canvas.draw()

# Save to NDaq
#ndaq.store_data(probe.pos, name='position', attrs={'dt':20.})
#ndaq.store_data(probe.spot, name='visualStim', attrs={'dt':20.})
#ndaq.store_data(probe.dataWin, attrs={'dt': 1./30})
ndaq.store_data(probe.data, attrs={'dt': 1. / 30})
Code example #12
# Get trigger indices 
ivector = np.arange(0, len(data[0]))
itrigger = ivector[data[0] == 1]  # samples where the trigger channel is high

# Get loom events
pre = 200
post = 500
events = []
for i in itrigger:
    event = data[1][i-pre:i+post]
    events.append(event)
    #plt.plot(event)

#plt.show()
ndaq.store_data(events, name='loom_events')

# Detect failures and escape latency
nestPos = 400.0
escapeThs = -10.0  # derivative of X position
failures, latencies = [], []
devents = []
for event in events:
    if np.sum(event < nestPos) == 0:  # never reached the nest: failure
        failures.append(1)
    else:
        failures.append(0)
        devent = np.diff(event[pre::])
        latencies.append(np.sum(devent < escapeThs))
        devents.append(devent)
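
The latency stored above is a count of all suprathreshold-derivative samples; if a time-to-first-movement latency is wanted instead, a hedged variant (not in the original) would be:

if np.any(devent < escapeThs):
    latency = np.argmax(devent < escapeThs)  # index of the first fast nest-ward step
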
Code example #13
# (fragment: tail of a baseline() function; ebsl is defined earlier)
        b = np.mean(data[n][0:ebsl])
        data[n] = data[n] - b
    return data


# Get rid of negative/positive going events
def neg(data, limit):
    # limit is fraction of min or max
    dataClean, dataNorm = [], []
    for n in data:
        epeak = n[0:int(10 / 0.02)].max()  # int() needed: float slice indices raise TypeError
        if n.min() > -limit[0] * epeak and n.max() < limit[1] * epeak:
            dataClean.append(n)
            dataNorm.append(n / epeak)
    return dataClean, dataNorm


# Run
#result = vm_sort((-50,-10))
result = vm_sort((-75, -15))
#result = isi_sort(100)
result = baseline(result)
result1, result2 = neg(result, (0.5, 1.5))
#result = avg(result)

# Save
#ndaq.store_data(events, name='events', attrs={'dt':dt})
#ndaq.store_data(result, name='events', attrs={'dt':dt})
#ndaq.store_data(result1, name='events_clean', attrs={'dt':dt})
ndaq.store_data(result2, name='events_norm', attrs={'dt': dt})
Code example #14
import numpy as np
import matplotlib.pylab as plt
import scipy.signal as signal
from analysis import smooth
from console import utils as ndaq

# Get data and items
data = ndaq.get_data()
items = ndaq.get_items()
dt = items[0].attrs['dt']

# Parameters
start = 0.2
end = 0.8

# Get peaks, decays and peak times
riseTimes = []
for trace in data:
    peak = trace.min()
    peakTime = trace.argmin()
    riseTrace = trace[:peakTime]
    # samples between 20% and 80% of the (negative) peak
    idx = (riseTrace < start*peak) & (riseTrace > end*peak)
    riseTime = np.sum(idx) * dt
    plt.plot(riseTrace[idx])
    print(peak*start, peak*end, riseTime)
    riseTimes.append(riseTime)
plt.show()

ndaq.store_data(riseTimes, name='rise_times')
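
Counting samples between the two levels overestimates the 20-80% rise time when the rise is non-monotonic; first-crossing indices avoid that. A minimal sketch applied to the last trace's riseTrace (not part of the original script):

i20 = np.argmax(riseTrace < start*peak)  # first sample below the 20% level
i80 = np.argmax(riseTrace < end*peak)    # first sample below the 80% level
if riseTrace.min() < end*peak:           # guard: the 80% level was actually reached
    print((i80 - i20) * dt)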

Code example #15
# (fragment: tail of a baseline() function; dt is defined earlier)
    bsl = int(round(bsl/dt))      # ms to samples; int() keeps the slice indices integer
    onset = int(round(onset/dt))
    data = np.array(data)
    for n in range(len(data)):
        b = np.mean(data[n, onset-bsl:onset])
        data[n] = data[n] - b
    return data

# Get rid of negative/positive going events
def neg(data, limit):
    # limit is fraction of min or max
    dataClean, dataNorm = [], []
    for n in data:
        epeak = n[0:int(10/0.02)].max()  # int() needed: float slice indices raise TypeError
        if n.min() > -limit[0]*epeak and n.max() < limit[1]*epeak:
            dataClean.append(n)
            dataNorm.append(n/epeak)
    return dataClean, dataNorm

# Run
#result = vm_sort(20,2, (-75,-10))
result = isi_sort(100)  #100
result = baseline(5,5,result)
result1, result2 = neg(result, (0.2,1.5))  #(0.2,1.5)
#result = avg(result)


# Save
ndaq.store_data(result1, name='events_clean', attrs={'dt':dt})
ndaq.store_data(result2, name='events_norm', attrs={'dt':dt})