def check_wave(self, wave, cutrange):
    """Check which threshold exceeding peaks in wave data look like spikes
    and return only events that fall within cutrange. Search local spatiotemporal
    window around thresh exceeding peak for biggest peak-to-peak sharpness.
    Test that together they exceed Vpp thresh.

    Returns (spikes, wavedata): a structured array of self.SPIKEDTYPE records and
    a matching int16 array of per-spike multichannel waveform windows.

    TODO: keep an eye on broad spike at ptc15.87.1024880, about 340 us wide.
    Should be counted though
    """
    sort = self.sort
    AD2uV = sort.converter.AD2uV # converts raw AD units to microvolts
    if self.extractparamsondetect:
        weights2f = sort.extractor.weights2f
        f = g2 # 2D Gaussian
        #f = cauchy2 # 2D Cauchy
    # holds time indices for each enabled chan until which each enabled chani is
    # locked out, updated on every found spike
    lockouts = np.zeros(self.nchans, dtype=np.int64)

    tsharp = time.time()
    # per-sample sharpness measure; nonzero only at local peaks
    # (established by the localsharp != 0.0 peak tests below)
    sharp = util.sharpness2D(wave.data)
    info('%s: sharpness2D() took %.3f sec' % (ps().name, time.time()-tsharp))
    targthreshsharp = time.time()
    # thresh exceeding peak indices:
    peakis = util.argthreshsharp(wave.data, self.thresh, sharp)
    info('%s: argthreshsharp() took %.3f sec' % (ps().name, time.time()-targthreshsharp))

    maxti = len(wave.ts) - 1
    dti = self.dti # temporal search radius, in timepoints
    twi = sort.twi # spike window indices relative to alignment point
    sdti = dti // 2 # spatial dti: max dti allowed between maxchan and all other chans
    nspikes = 0
    npeaks = len(peakis)
    spikes = np.zeros(npeaks, self.SPIKEDTYPE) # nspikes will always be <= npeaks
    # TODO: test whether np.empty or np.zeros is faster overall in this case
    wavedata = np.empty((npeaks, self.maxnchansperspike, self.maxnt), dtype=np.int16)
    # check each peak for validity
    for ti, chani in peakis:
        if DEBUG: debug('*** trying thresh peak at t=%d chan=%d'
                        % (wave.ts[ti], self.chans[chani]))
        # is this thresh exceeding peak locked out?
        if ti <= lockouts[chani]:
            if DEBUG: debug('peak is locked out')
            continue # skip to next peak
        # find all enabled chanis within locknbh of chani
        # lockouts are checked later
        chanis = self.locknbhdi[chani]
        nchans = len(chanis)
        # get sharpness window DT on either side of this peak
        t0i = max(ti-dti, 0) # check for lockouts a bit later
        t1i = ti+dti+1 # +1 makes it end inclusive, don't worry about slicing past end
        window = wave.data[chanis, t0i:t1i] # multichan data window, might not be contig
        # collect peak-to-peak sharpness for all chans
        # save max and adjacent sharpness timepoints for each chan, and keep track
        # of which of the two adjacent non locked out peaks is the sharpest
        localsharp = sharp[chanis, t0i:t1i]
        ppsharp = np.zeros(nchans, dtype=np.float32)
        maxsharpis = np.zeros(nchans, dtype=int)
        adjpeakis = np.zeros((nchans, 2), dtype=int)
        maxadjiis = np.zeros(nchans, dtype=int)
        for cii in range(nchans):
            localpeakis, = np.where(localsharp[cii] != 0.0)
            lastpeakii = len(localpeakis) - 1
            try:
                maxsharpii = abs(localsharp[cii, localpeakis]).argmax()
            except ValueError:
                continue # localpeakis is empty
            maxsharpi = localpeakis[maxsharpii]
            maxsharpis[cii] = maxsharpi
            # get one adjacent peak to left and right each, due to limits, either or
            # both may be identical to the max sharpness peak
            adjpeakis[cii] = localpeakis[[max(maxsharpii-1, 0),
                                          min(maxsharpii+1, lastpeakii)]]
            if localsharp[cii, maxsharpi] < 0:
                maxadjii = localsharp[cii, adjpeakis[cii]].argmax() # look for +ve adj peak
            else:
                maxadjii = localsharp[cii, adjpeakis[cii]].argmin() # look for -ve adj peak
            if maxadjii == 0 and (t0i+adjpeakis[cii, maxadjii] < lockouts[chanis[cii]]):
                # adjacent peak comes before maxsharpi and is locked out
                maxadjii = 1 # choose adjacent peak that falls after maxsharpi
            maxadjiis[cii] = maxadjii # save
            adjpi = adjpeakis[cii, maxadjii]
            # if max sharpness peak is the only one, then I think ppsharp comes out
            # as zero, and chan cii is therefore ignored when searching for biggest
            # ppsharp. Not sure if that's ideal, maybe ppsharp in such a case should
            # just be the max sharpness value
            ppsharp[cii] = localsharp[cii, maxsharpi] - localsharp[cii, adjpi]

        oldti = ti # save
        oldchani = chani # save
        # choose chan with biggest ppsharp as maxchan, check that this is identical to
        # the trigger chan, that its sharpest peak isn't locked out, that it falls within
        # cutrange, and that it meets both Vp and Vpp thresh criteria
        maxcii = abs(ppsharp).argmax()
        chani = chanis[maxcii] # update maxchan
        if chani != oldchani:
            if DEBUG: debug("triggered off peak on chan that isn't max ppsharpness for "
                            "this event, pass on this peak and wait for the true "
                            "sharpest peak to come later")
            continue
        maxsharpi = maxsharpis[maxcii]
        ti = t0i + maxsharpi # choose sharpest peak of maxchan, absolute
        # if sharpest peak is in the past, use it. If it's yet to come, wait for it
        if ti > oldti:
            if DEBUG: debug("triggered off early adjacent peak for this event, "
                            "pass on this peak and wait for the true sharpest peak "
                            "to come later")
            continue
        if ti <= lockouts[chani]: # sharpest peak is locked out
            if DEBUG: debug('sharpest peak at t=%d chan=%d is locked out'
                            % (wave.ts[ti], self.chans[chani]))
            continue
        if not (cutrange[0] <= wave.ts[ti] <= cutrange[1]):
            if DEBUG:
                # use %r since wave.ts[ti] is np.int64 and %d gives TypeError if > 2**31
                debug("spike time %r falls outside cutrange for this searchblock "
                      "call, discarding" % wave.ts[ti])
            continue # skip to next peak
        # check that Vp thresh is exceeded by one of the two sharpest peaks
        adjpi = adjpeakis[maxcii, maxadjiis[maxcii]]
        # relative to t0i, not necessarily in temporal order:
        maxchantis = np.array([maxsharpi, adjpi])
        Vp = abs(window[maxcii, maxchantis]).max() # grab biggest peak
        if Vp < self.thresh[chani]:
            if DEBUG: debug('peak at t=%d chan=%d and its adjacent peak are both < Vp'
                            % (wave.ts[ti], self.chans[chani]))
            continue
        # check that Vpp thresh is exceeded by the two sharpest peaks
        # NOTE(review): Vs keeps wave.data's dtype (int16 per wavedata above);
        # abs() of INT16_MIN would wrap — presumably full-scale samples never
        # occur here, confirm upstream
        Vs = window[maxcii, maxchantis]
        Vpp = abs(Vs).sum() # Vs are of opposite sign
        if Vpp < self.ppthresh[chani]:
            if DEBUG: debug('peaks at t=%r chan=%d are < Vpp'
                            % (wave.ts[[ti, t0i+adjpi]], self.chans[chani]))
            continue
        if DEBUG: debug('found biggest thresh exceeding ppsharp at t=%d chan=%d'
                        % (wave.ts[ti], self.chans[chani]))
        # get new spatiotemporal neighbourhood, with full window
        # align to -ve of the two sharpest peaks
        aligni = localsharp[maxcii, maxchantis].argmin()
        #oldti = ti # save
        ti = t0i + maxchantis[aligni] # new absolute time index to align to
        # cut new window
        oldt0i = t0i
        t0i = max(ti+twi[0], 0)
        t1i = min(ti+twi[1]+1, maxti) # end inclusive
        window = wave.data[chanis, t0i:t1i] # multichan data window, might not be contig
        maxcii, = np.where(chanis == chani)
        maxchantis += oldt0i - t0i # relative to new t0i
        tis = np.zeros((nchans, 2), dtype=int) # holds time indices for each lockchani
        tis[maxcii] = maxchantis
        # pick corresponding peaks on other chans according to how close they are
        # to those on maxchan, Don't consider the sign of the peaks on each
        # chan, just their proximity in time. In other words, allow for spike
        # inversion across space
        localsharp = sharp[chanis, t0i:t1i]
        peak0ti, peak1ti = maxchantis
        for cii in range(nchans):
            if cii == maxcii: # already set
                continue
            localpeakis, = np.where(localsharp[cii] != 0.0)
            if len(localpeakis) == 0: # empty
                tis[cii] = maxchantis # use same tis as maxchan
                continue
            lastpeakii = len(localpeakis) - 1
            # find peak on this chan that's temporally closest to primary peak on maxchan.
            # If two peaks are equally close, this picks the first one, although we should
            # probably pick the sharpest one instead:
            dt0is = abs(localpeakis-peak0ti)
            peak0ii = dt0is.argmin()
            # save primary peak for this cii
            dt0i = dt0is[peak0ii]
            if dt0i > sdti: # too distant in time
                tis[cii, 0] = peak0ti # use same t0i as maxchan
            else: # give it its own t0i
                tis[cii, 0] = localpeakis[peak0ii]
            # save 2ndary peak for this cii
            if peak0ti < peak1ti: # primary peak comes first (more common case)
                peak1ii = peak0ii + 1 # 2ndary peak is 1 to the right
            else: # peak1ti < peak0ti, ie 2ndary peak comes first
                peak1ii = peak0ii - 1 # 2ndary peak is 1 to the left
            dt1is = abs(localpeakis-peak1ti)
            try:
                dt1i = dt1is[peak1ii]
            except IndexError: # no local peak relative to primary peak
                tis[cii, 1] = peak1ti # use same t1i as maxchan
                continue
            if dt1i > sdti: # too distant in time
                tis[cii, 1] = peak1ti # use same t1i as maxchan
            else:
                tis[cii, 1] = localpeakis[peak1ii]

        # find inclchanis, get corresponding indices into locknbhd of chanis
        inclchanis = self.inclnbhdi[chani]
        ninclchans = len(inclchanis)
        inclchans = self.chans[inclchanis]
        chan = self.chans[chani]
        inclchani = int(np.where(inclchans == chan)[0]) # != chani!
        inclciis = chanis.searchsorted(inclchanis)
        if DEBUG: debug("final window params: t0=%r, t1=%r, Vs=%r, peakts=\n%r"
                        % (wave.ts[t0i], wave.ts[t1i], list(AD2uV(Vs)),
                           wave.ts[t0i+tis]))
        # build up spike record
        s = spikes[nspikes]
        s['t'] = wave.ts[ti]
        # leave each spike's chanis in sorted order, as they are in self.inclnbhdi,
        # important assumption used later on, like in sort.get_wave() and
        # Neuron.update_wave()
        ts = wave.ts[t0i:t1i]
        # use ts = np.arange(s['t0'], s['t1'], stream.tres) to reconstruct
        s['t0'], s['t1'] = wave.ts[t0i], wave.ts[t1i]
        incltis = tis[inclciis]
        s['tis'][:ninclchans] = incltis # wrt t0i
        s['aligni'] = aligni # 0 or 1
        s['dt'] = int(abs(ts[tis[maxcii, 0]] - ts[tis[maxcii, 1]])) # in us
        s['V0'], s['V1'] = AD2uV(Vs) # in uV
        s['Vpp'] = AD2uV(Vpp) # in uV
        s['chan'], s['chans'][:ninclchans], s['nchans'] = chan, inclchans, ninclchans
        s['chani'] = inclchani
        inclwindow = window[inclciis]
        nt = inclwindow.shape[1] # isn't always full width if recording has gaps
        wavedata[nspikes, :ninclchans, :nt] = inclwindow
        if self.extractparamsondetect:
            # Get Vpp at each inclchan's tis, use as spatial weights:
            # see core.rowtake() or util.rowtake_cy() for indexing explanation:
            w = np.float32(inclwindow[np.arange(ninclchans)[:, None], incltis])
            w = abs(w).sum(axis=1)
            x = self.siteloc[inclchanis, 0] # 1D array (row)
            y = self.siteloc[inclchanis, 1]
            s['x0'], s['y0'], s['sx'], s['sy'] = weights2f(f, w, x, y, inclchani)
        if DEBUG: debug('*** found new spike %d: %r @ (%d, %d)'
                        % (nspikes+self.nspikes, s['t'], self.siteloc[chani, 0],
                           self.siteloc[chani, 1]))
        # give each chan a distinct lockout, based on how each chan's
        # sharpest peaks line up with those of the maxchan. This fixes double
        # triggers that happened about 1% of the time (ptc18.14.7166200 & ptc18.14.9526000)
        # NOTE(review): this assigns lockouts unconditionally, so a longer lockout
        # already in place on one of these chans is shortened — confirm intended
        lockouts[chanis] = t0i + tis.max(axis=1)
        if DEBUG:
            debug('lockouts=%r\nfor chans=%r' % (list(wave.ts[lockouts[chanis]]),
                                                 list(self.chans[chanis])))
        nspikes += 1

    # shrink spikes and wavedata down to actual needed size
    spikes.resize(nspikes, refcheck=False)
    wds = wavedata.shape
    wavedata.resize((nspikes, wds[1], wds[2]), refcheck=False)
    return spikes, wavedata
def check_wave(self, wave, cutrange):
    """Check which threshold-exceeding peaks in wave data look like spikes
    and return only events that fall within cutrange. Search local spatiotemporal
    window around threshold-exceeding peak for biggest peak-to-peak sharpness.
    Finally, test that the sharpest peak and its neighbour exceed Vp and Vpp thresholds.

    Parameters: wave provides .data (nchans x nt int16 samples) and .ts
    (timestamps per timepoint); cutrange is an inclusive (start, end) timestamp
    pair limiting which spikes are kept.

    Returns (spikes, wavedata): a structured array of self.SPIKEDTYPE records and
    a matching int16 array of per-spike multichannel waveform windows, both
    trimmed to the number of accepted spikes.
    """
    sort = self.sort
    AD2uV = sort.converter.AD2uV # converts raw AD units to microvolts
    if self.extractparamsondetect:
        weights2f = sort.extractor.weights2spatial
        f = sort.extractor.f
    # holds time indices for each enabled chan until which each enabled chani is
    # locked out, updated on every found spike
    lockouts = np.zeros(self.nchans, dtype=np.int64)

    tsharp = time.time()
    sharp = util.sharpness2D(wave.data) # sharpness of all zero-crossing separated peaks
    info('%s: sharpness2D() took %.3f sec' % (ps().name, time.time()-tsharp))
    targthreshsharp = time.time()
    # threshold-exceeding peak indices (2D, columns are [tis, cis])
    peakis = util.argthreshsharp(wave.data, self.thresh, sharp)
    info('%s: argthreshsharp() took %.3f sec' % (ps().name, time.time()-targthreshsharp))

    maxti = len(wave.ts) - 1
    dti = self.dti # temporal search radius, in timepoints
    twi = sort.twi # spike window indices relative to alignment point
    sdti = dti // 2 # spatial dti: max dti allowed between maxchan and all other chans
    nspikes = 0
    npeaks = len(peakis)
    spikes = np.zeros(npeaks, self.SPIKEDTYPE) # nspikes will always be <= npeaks
    ## TODO: test whether np.empty or np.zeros is faster overall in this case
    wavedata = np.empty((npeaks, self.maxnchansperspike, self.maxnt), dtype=np.int16)
    # check each threshold-exceeding peak for validity:
    for peaki, (ti, chani) in enumerate(peakis):
        if DEBUG: self.log('*** trying thresh peak at t=%r chan=%d'
                           % (wave.ts[ti], self.chans[chani]))

        # is this threshold-exceeding peak locked out?
        tlockoutchani = lockouts[chani]
        if ti <= tlockoutchani:
            if DEBUG: self.log('peak is locked out')
            continue # skip to next peak

        # find all enabled chanis within inclnbh of chani, lockouts are checked later:
        chanis = self.inclnbhdi[chani]
        nchans = len(chanis)

        # get search window DT on either side of this peak, for checking sharpness
        t0i = max(ti-dti, 0) # check for lockouts a bit later
        t1i = ti+dti+1 # +1 makes it end inclusive, don't worry about slicing past end
        window = wave.data[chanis, t0i:t1i] # search window, might not be contig
        if DEBUG: self.log('searching window (%d, %d) on chans=%r'
                           % (wave.ts[t0i], wave.ts[t1i], list(self.chans[chanis])))

        # Collect peak-to-peak sharpness for all chans. Save max and adjacent sharpness
        # timepoints for each chan, and keep track of which of the two adjacent non locked
        # out peaks is the sharpest. Note that the localsharp array contain sharpness of
        # all local peaks, not just those that exceed threshold, as in peakis array.
        localsharp = sharp[chanis, t0i:t1i] # sliced the same way as window
        ppsharp = np.zeros(nchans, dtype=np.float32)
        maxsharpis = np.zeros(nchans, dtype=int)
        adjpeakis = np.zeros((nchans, 2), dtype=int)
        maxadjiis = np.zeros(nchans, dtype=int)
        continuepeaki = False # signal to skip to next peaki
        for cii in range(nchans):
            localpeakis, = np.where(localsharp[cii] != 0.0)
            # keep only non-locked out localpeakis on this channel:
            localpeakis = localpeakis[(t0i+localpeakis) > lockouts[chanis[cii]]]
            if len(localpeakis) == 0:
                continue # localpeakis is empty
            lastpeakii = len(localpeakis) - 1
            maxsharpii = abs(localsharp[cii, localpeakis]).argmax()
            maxsharpi = localpeakis[maxsharpii]
            maxsharpis[cii] = maxsharpi
            # Get one adjacent peak to left and right each. Due to limits, either or
            # both may be identical to the max sharpness peak
            adjpeakis[cii] = localpeakis[[max(maxsharpii-1, 0),
                                          min(maxsharpii+1, lastpeakii)]]
            if localsharp[cii, maxsharpi] < 0:
                maxadjii = localsharp[cii, adjpeakis[cii]].argmax() # look for +ve adj peak
            else:
                maxadjii = localsharp[cii, adjpeakis[cii]].argmin() # look for -ve adj peak
            maxadjiis[cii] = maxadjii # save
            adjpi = adjpeakis[cii, maxadjii]
            if maxsharpi != adjpi:
                ppsharp[cii] = localsharp[cii, maxsharpi] - localsharp[cii, adjpi]
            else: # monophasic spike, set ppsharp == sharpness of single peak:
                ppsharp[cii] = localsharp[cii, maxsharpi]
                if chanis[cii] == chani: # trigger chan is monophasic
                    # ensure ppsharp of monophasic spike >= Vppthresh**2/dt, ie ensure that
                    # its Vpp exceeds Vppthresh and has zero crossings on either side,
                    # with no more than dt between. Avoids excessively wide
                    # monophasic peaks from being considered as spikes:
                    if DEBUG: self.log("found monophasic spike")
                    if abs(ppsharp[cii]) < self.ppthresh[chani]**2 / dti:
                        continuepeaki = True
                        if DEBUG: self.log("peak wasn't sharp enough for a monophasic "
                                           "spike")
                        break # out of cii loop

        if continuepeaki:
            continue # skip to next peak

        # Choose chan with biggest ppsharp as maxchan and its sharpest peak as the primary
        # peak, check that these new chani and ti values are identical to the trigger
        # values in peakis, that the peak at [chani, ti] isn't locked out, that it falls
        # within cutrange, and that it meets both Vp and Vpp threshold criteria.
        oldchani, oldti = chani, ti # save
        maxcii = abs(ppsharp).argmax() # choose chan with sharpest peak as new maxchan
        chani = chanis[maxcii] # update maxchan
        maxsharpi = maxsharpis[maxcii] # choose sharpest peak of maxchan, absolute
        ti = t0i + maxsharpi # update ti

        # Search forward through peakis for a future (later) row that matches the
        # (potentially new) [chani, ti] calculated above based on sharpness of local
        # peaks. If that particular tuple is indeed coming up, it is therefore
        # thresh exceeding, and should be waited for. If not, don't wait for it. Something
        # that was thresh exceeding caused the trigger, but this nearby [chani, ti] tuple
        # is according to the sharpness measure the best estimate of the spatiotemporal
        # origin of the trigger-causing event.
        newpeak_coming_up = (peakis[peaki+1:] == [ti, chani]).prod(axis=1).any()
        if chani != oldchani:
            if newpeak_coming_up:
                if DEBUG: self.log("triggered off peak on chan that isn't max ppsharpness for "
                                   "this event, pass on this peak and wait for the true "
                                   "sharpest peak to come later")
                continue # skip to next peak
            else:
                # update all variables that depend on chani that wouldn't otherwise be
                # updated:
                tlockoutchani = lockouts[chani]
                chanis = self.inclnbhdi[chani]
                nchans = len(chanis)

        if ti > oldti:
            if newpeak_coming_up:
                if DEBUG: self.log("triggered off early adjacent peak for this event, pass on "
                                   "this peak and wait for the true sharpest peak to come later")
                continue # skip to next peak
            else:
                # unlike chani, it seems that are no variables that depend on ti that
                # wouldn't otherwise be updated:
                pass

        if ti <= tlockoutchani: # sharpest peak is locked out
            if DEBUG: self.log('sharpest peak at t=%d chan=%d is locked out'
                               % (wave.ts[ti], self.chans[chani]))
            continue # skip to next peak

        if not (cutrange[0] <= wave.ts[ti] <= cutrange[1]):
            # use %r since wave.ts[ti] is np.int64 and %d gives TypeError if > 2**31:
            if DEBUG: self.log("spike time %r falls outside cutrange for this searchblock "
                               "call, discarding" % wave.ts[ti])
            continue # skip to next peak

        # check that Vp threshold is exceeded by at least one of the two sharpest peaks
        adjpi = adjpeakis[maxcii, maxadjiis[maxcii]]
        # relative to t0i, not necessarily in temporal order:
        maxchantis = np.array([maxsharpi, adjpi])
        # voltages of the two sharpest peaks, convert int16 to int64 to prevent overflow
        Vs = np.int64(window[maxcii, maxchantis])
        Vp = abs(Vs).max() # grab biggest peak
        if Vp < self.thresh[chani]:
            if DEBUG: self.log('peak at t=%d chan=%d and its adjacent peak are both '
                               '< Vp=%f uV' % (wave.ts[ti], self.chans[chani], AD2uV(Vp)))
            continue # skip to next peak
        # check that the two sharpest peaks together exceed Vpp threshold:
        Vpp = abs(Vs[0] - Vs[1]) # Vs are of opposite sign, unless monophasic
        if Vpp == 0: # monophasic spike
            Vpp = Vp # use Vp as Vpp
        if Vpp < self.ppthresh[chani]:
            if DEBUG: self.log('peaks at t=%r chan=%d are < Vpp = %f'
                               % (wave.ts[[ti, t0i+adjpi]], self.chans[chani], AD2uV(Vpp)))
            continue # skip to next peak

        if DEBUG: self.log('found biggest thresh exceeding ppsharp at t=%d chan=%d'
                           % (wave.ts[ti], self.chans[chani]))

        # get new spatiotemporal neighbourhood, with full window,
        # align to -ve of the two sharpest peaks
        aligni = localsharp[maxcii, maxchantis].argmin()
        #oldti = ti # save
        ti = t0i + maxchantis[aligni] # new absolute time index to align to
        # cut new window
        oldt0i = t0i
        t0i = max(ti+twi[0], 0)
        t1i = min(ti+twi[1]+1, maxti) # end inclusive
        window = wave.data[chanis, t0i:t1i] # multichan data window, might not be contig
        maxcii, = np.where(chanis == chani)
        maxchantis += oldt0i - t0i # relative to new t0i
        tis = np.zeros((nchans, 2), dtype=int) # holds time indices for each lockchani
        tis[maxcii] = maxchantis

        # pick corresponding peaks on other chans according to how close they are
        # to those on maxchan, Don't consider the sign of the peaks on each
        # chan, just their proximity in time. In other words, allow for spike
        # inversion across space
        localsharp = sharp[chanis, t0i:t1i]
        peak0ti, peak1ti = maxchantis # primary and 2ndary peak tis of maxchan
        for cii in range(nchans):
            if cii == maxcii: # already set
                continue
            localpeakis, = np.where(localsharp[cii] != 0.0)
            # keep only non-locked out localpeakis on this channel:
            localpeakis = localpeakis[(t0i+localpeakis) > lockouts[chanis[cii]]]
            if len(localpeakis) == 0: # localpeakis is empty
                tis[cii] = maxchantis # use same tis as maxchan
                continue
            lastpeakii = len(localpeakis) - 1
            # find peak on this chan that's temporally closest to primary peak on maxchan.
            # If two peaks are equally close, pick the sharpest one
            dt0is = abs(localpeakis-peak0ti)
            if (np.diff(dt0is) == 0).any(): # two peaks equally close, pick sharpest one
                peak0ii = abs(localsharp[cii, localpeakis]).argmax()
            else:
                peak0ii = dt0is.argmin()
            # save primary peak for this cii
            dt0i = dt0is[peak0ii]
            if dt0i > sdti: # too distant in time
                tis[cii, 0] = peak0ti # use same t0i as maxchan
            else: # give it its own t0i
                tis[cii, 0] = localpeakis[peak0ii]
            # save 2ndary peak for this cii
            if len(localpeakis) == 1: # monophasic, set 2ndary peak same as primary
                tis[cii, 1] = tis[cii, 0]
                continue
            if peak0ti <= peak1ti: # primary peak comes first (more common case)
                peak1ii = min(peak0ii+1, lastpeakii) # 2ndary peak is 1 to the right
            else: # peak1ti < peak0ti, ie 2ndary peak comes first
                peak1ii = max(peak0ii-1, 0) # 2ndary peak is 1 to the left
            dt1is = abs(localpeakis-peak1ti)
            dt1i = dt1is[peak1ii]
            if dt1i > sdti: # too distant in time
                tis[cii, 1] = peak1ti # use same t1i as maxchan
            else:
                tis[cii, 1] = localpeakis[peak1ii]

        # based on maxchan (chani), find inclchanis, incltis, and inclwindow:
        inclchanis = self.inclnbhdi[chani]
        ninclchans = len(inclchanis)
        inclchans = self.chans[inclchanis]
        chan = self.chans[chani]
        inclchani = int(np.where(inclchans == chan)[0]) # != chani!
        inclciis = chanis.searchsorted(inclchanis)
        incltis = tis[inclciis]
        inclwindow = window[inclciis]

        if DEBUG: self.log("final window params: t0=%r, t1=%r, Vs=%r, peakts=\n%r"
                           % (wave.ts[t0i], wave.ts[t1i], list(AD2uV(Vs)),
                              wave.ts[t0i+tis]))

        if self.extractparamsondetect:
            # Get Vpp at each inclchan's tis, use as spatial weights:
            # see core.rowtake() or util.rowtake_cy() for indexing explanation:
            w = np.float32(inclwindow[np.arange(ninclchans)[:, None], incltis])
            w = abs(w).sum(axis=1)
            x = self.siteloc[inclchanis, 0] # 1D array (row)
            y = self.siteloc[inclchanis, 1]
            params = weights2f(f, w, x, y, inclchani)
            # FIX: use identity test instead of `params == None`; equality against
            # None is unidiomatic and would raise if params were ever an array
            if params is None: # presumably a non-localizable many-channel noise event
                if DEBUG:
                    treject = intround(wave.ts[ti]) # nearest us
                    self.log("reject spike at t=%d based on fit params" % treject)
                # no real need to lockout chans for a params-rejected spike
                continue # skip to next peak

        # build up spike record:
        s = spikes[nspikes]
        # wave.ts might be floats, depending on sampfreq
        s['t'] = intround(wave.ts[ti]) # nearest us
        # leave each spike's chanis in sorted order, as they are in self.inclnbhdi,
        # important assumption used later on, like in sort.get_wave() and
        # Neuron.update_wave()
        ts = wave.ts[t0i:t1i] # potentially floats
        # use ts = np.arange(s['t0'], s['t1'], stream.tres) to reconstruct
        s['t0'], s['t1'] = intround(wave.ts[t0i]), intround(wave.ts[t1i]) # nearest us
        s['tis'][:ninclchans] = incltis # wrt t0i=0
        s['aligni'] = aligni # 0 or 1
        s['dt'] = intround(abs(ts[tis[maxcii, 0]] - ts[tis[maxcii, 1]])) # nearest us
        s['V0'], s['V1'] = AD2uV(Vs) # in uV
        s['Vpp'] = AD2uV(Vpp) # in uV
        s['chan'], s['chans'][:ninclchans], s['nchans'] = chan, inclchans, ninclchans
        s['chani'] = inclchani
        nt = inclwindow.shape[1] # isn't always full width if recording has gaps
        wavedata[nspikes, :ninclchans, :nt] = inclwindow

        if self.extractparamsondetect:
            # Save spatial fit params, and lockout only the channels within lockrx*sx
            # of the fit spatial location of the spike, up to a max of self.inclr.
            s['x0'], s['y0'], s['sx'], s['sy'] = params
            x0, y0 = s['x0'], s['y0']
            # lockout radius for this spike:
            lockr = min(self.lockrx*s['sx'], self.inclr) # in um
            # test y coords of inclchans in y array, ylockchaniis can be used to index
            # into x, y and inclchans:
            ylockchaniis, = np.where(np.abs(y - y0) <= lockr) # convert bool arr to int
            # Test Euclid distance from x0, y0 for each ylockchani, keeping only those
            # within lockr. FIX: rebuild by filtering instead of np.delete inside the
            # loop — deleting by position while iterating values removed the wrong
            # entries whenever ylockchaniis wasn't simply arange(n):
            lockchaniis = np.asarray([ ylockchanii for ylockchanii in ylockchaniis
                                       if dist((x[ylockchanii], y[ylockchanii]),
                                               (x0, y0)) <= lockr ], dtype=int)
            lockchans = inclchans[lockchaniis]
            lockchanis = inclchanis[lockchaniis]
            nlockchans = len(lockchans)
            s['lockchans'][:nlockchans], s['nlockchans'] = lockchans, nlockchans
            # just for testing:
            #assert (lockchanis == self.chans.searchsorted(lockchans)).all()
            #assert (lockchaniis == chanis.searchsorted(lockchanis)).all()
        else:
            # in this case, the inclchans and lockchans fields are redundant
            s['lockchans'][:ninclchans], s['nlockchans'] = inclchans, ninclchans
            lockchanis = chanis
            lockchaniis = np.arange(ninclchans)

        # give each chan a distinct lockout, based on how each chan's
        # sharpest peaks line up with those of the maxchan. Respect existing lockouts:
        # on each of the relevant chans, keep whichever lockout ends last
        thislockout = t0i + tis.max(axis=1)[lockchaniis]
        lockouts[lockchanis] = np.max([lockouts[lockchanis], thislockout], axis=0)
        if DEBUG:
            self.log('lockouts=%r\nfor chans=%r' % (list(wave.ts[lockouts[lockchanis]]),
                                                    list(self.chans[lockchanis])))
            self.log('*** found new spike %d: t=%d chan=%d (%d, %d)'
                     % (nspikes+self.nspikes, s['t'], chan,
                        self.siteloc[chani, 0], self.siteloc[chani, 1]))
        nspikes += 1

    # trim spikes and wavedata arrays down to size
    spikes.resize(nspikes, refcheck=False)
    wds = wavedata.shape
    wavedata.resize((nspikes, wds[1], wds[2]), refcheck=False)
    return spikes, wavedata
def check_wave(self, wave, cutrange): """Check which threshold-exceeding peaks in wave data look like spikes and return only events that fall within cutrange. Search local spatiotemporal window around threshold-exceeding peak for biggest peak-to-peak sharpness. Finally, test that the sharpest peak and its neighbour exceed Vp and Vpp thresholds""" sort = self.sort AD2uV = sort.converter.AD2uV if self.extractparamsondetect: weights2f = sort.extractor.weights2spatial f = sort.extractor.f # holds time indices for each enabled chan until which each enabled chani is # locked out, updated on every found spike lockouts = np.zeros(self.nchans, dtype=np.int64) tsharp = time.time() sharp = util.sharpness2D(wave.data) # sharpness of all zero-crossing separated peaks info('%s: sharpness2D() took %.3f sec' % (ps().name, time.time()-tsharp)) targthreshsharp = time.time() # threshold-exceeding peak indices (2D, columns are [tis, cis]) peakis = util.argthreshsharp(wave.data, self.thresh, sharp) info('%s: argthreshsharp() took %.3f sec' % (ps().name, time.time()-targthreshsharp)) maxti = len(wave.ts) - 1 dti = self.dti twi = sort.twi sdti = dti // 2 # spatial dti: max dti allowed between maxchan and all other chans nspikes = 0 npeaks = len(peakis) spikes = np.zeros(npeaks, self.SPIKEDTYPE) # nspikes will always be <= npeaks ## TODO: test whether np.empty or np.zeros is faster overall in this case wavedata = np.empty((npeaks, self.maxnchansperspike, self.maxnt), dtype=np.int16) # check each threshold-exceeding peak for validity: for peaki, (ti, chani) in enumerate(peakis): if DEBUG: self.log('*** trying thresh peak at t=%r chan=%d' % (wave.ts[ti], self.chans[chani])) # is this threshold-exceeding peak locked out? 
tlockoutchani = lockouts[chani] if ti <= tlockoutchani: if DEBUG: self.log('peak is locked out') continue # skip to next peak # find all enabled chanis within inclnbh of chani, lockouts are checked later: chanis = self.inclnbhdi[chani] nchans = len(chanis) # get search window DT on either side of this peak, for checking sharpness t0i = max(ti-dti, 0) # check for lockouts a bit later t1i = ti+dti+1 # +1 makes it end inclusive, don't worry about slicing past end window = wave.data[chanis, t0i:t1i] # search window, might not be contig if DEBUG: self.log('searching window (%d, %d) on chans=%r' % (wave.ts[t0i], wave.ts[t1i], list(self.chans[chanis]))) # Collect peak-to-peak sharpness for all chans. Save max and adjacent sharpness # timepoints for each chan, and keep track of which of the two adjacent non locked # out peaks is the sharpest. Note that the localsharp array contain sharpness of # all local peaks, not just those that exceed threshold, as in peakis array. localsharp = sharp[chanis, t0i:t1i] # sliced the same way as window ppsharp = np.zeros(nchans, dtype=np.float32) maxsharpis = np.zeros(nchans, dtype=int) adjpeakis = np.zeros((nchans, 2), dtype=int) maxadjiis = np.zeros(nchans, dtype=int) continuepeaki = False # signal to skip to next peaki for cii in range(nchans): localpeakis, = np.where(localsharp[cii] != 0.0) # keep only non-locked out localpeakis on this channel: localpeakis = localpeakis[(t0i+localpeakis) > lockouts[chanis[cii]]] if len(localpeakis) == 0: continue # localpeakis is empty lastpeakii = len(localpeakis) - 1 maxsharpii = abs(localsharp[cii, localpeakis]).argmax() maxsharpi = localpeakis[maxsharpii] maxsharpis[cii] = maxsharpi # Get one adjacent peak to left and right each. 
                # (continuation of check_wave's per-threshold-peak loop)
                # NOTE(review): this span begins mid-comment; the comment's first half
                # ("get one adjacent peak to left and right each...") precedes it.
                # Due to limits, either or
                # both may be identical to the max sharpness peak
                adjpeakis[cii] = localpeakis[[max(maxsharpii-1, 0),
                                              min(maxsharpii+1, lastpeakii)]]
                if localsharp[cii, maxsharpi] < 0:
                    maxadjii = localsharp[cii, adjpeakis[cii]].argmax() # look for +ve adj peak
                else:
                    maxadjii = localsharp[cii, adjpeakis[cii]].argmin() # look for -ve adj peak
                maxadjiis[cii] = maxadjii # save
                adjpi = adjpeakis[cii, maxadjii]
                if maxsharpi != adjpi:
                    # biphasic: peak-to-peak sharpness is the difference between the two
                    # opposite-sign sharpness extrema
                    ppsharp[cii] = localsharp[cii, maxsharpi] - localsharp[cii, adjpi]
                else: # monophasic spike, set ppsharp == sharpness of single peak:
                    ppsharp[cii] = localsharp[cii, maxsharpi]
                    if chanis[cii] == chani: # trigger chan is monophasic
                        # ensure ppsharp of monophasic spike >= Vppthresh**2/dt, ie ensure that
                        # its Vpp exceeds Vppthresh and has zero crossings on either side,
                        # with no more than dt between. Avoids excessively wide
                        # monophasic peaks from being considered as spikes:
                        if DEBUG: self.log("found monophasic spike")
                        if abs(ppsharp[cii]) < self.ppthresh[chani]**2 / dti:
                            # NOTE(review): continuepeaki is presumably initialized False
                            # before the cii loop — confirm in the preceding code
                            continuepeaki = True
                            if DEBUG: self.log("peak wasn't sharp enough for a monophasic "
                                               "spike")
                            break # out of cii loop

            if continuepeaki:
                continue # skip to next peak

            # Choose chan with biggest ppsharp as maxchan and its sharpest peak as the primary
            # peak, check that these new chani and ti values are identical to the trigger
            # values in peakis, that the peak at [chani, ti] isn't locked out, that it falls
            # within cutrange, and that it meets both Vp and Vpp threshold criteria.
            oldchani, oldti = chani, ti # save
            maxcii = abs(ppsharp).argmax() # choose chan with sharpest peak as new maxchan
            chani = chanis[maxcii] # update maxchan
            maxsharpi = maxsharpis[maxcii] # choose sharpest peak of maxchan, absolute
            ti = t0i + maxsharpi # update ti

            # Search forward through peakis for a future (later) row that matches the
            # (potentially new) [chani, ti] calculated above based on sharpness of local
            # peaks. If that particular tuple is indeed coming up, it is therefore
            # thresh exceeding, and should be waited for. If not, don't wait for it. Something
            # that was thresh exceeding caused the trigger, but this nearby [chani, ti] tuple
            # is according to the sharpness measure the best estimate of the spatiotemporal
            # origin of the trigger-causing event.
            # NOTE(review): peaki is presumably the enumeration index of the peak loop
            # (e.g. `for peaki, (ti, chani) in enumerate(peakis):`) — confirm in loop header
            newpeak_coming_up = (peakis[peaki+1:] == [ti, chani]).prod(axis=1).any()
            if chani != oldchani:
                if newpeak_coming_up:
                    if DEBUG:
                        self.log("triggered off peak on chan that isn't max ppsharpness for "
                                 "this event, pass on this peak and wait for the true "
                                 "sharpest peak to come later")
                    continue # skip to next peak
                else:
                    # update all variables that depend on chani that wouldn't otherwise be
                    # updated:
                    tlockoutchani = lockouts[chani]
                    chanis = self.inclnbhdi[chani]
                    nchans = len(chanis)

            if ti > oldti:
                if newpeak_coming_up:
                    if DEBUG:
                        self.log("triggered off early adjacent peak for this event, pass on "
                                 "this peak and wait for the true sharpest peak to come later")
                    continue # skip to next peak
                else:
                    # unlike chani, it seems that are no variables that depend on ti that
                    # wouldn't otherwise be updated:
                    pass

            # NOTE(review): when chani == oldchani, tlockoutchani must have been bound
            # earlier for this line to work — confirm it's set before this span
            if ti <= tlockoutchani: # sharpest peak is locked out
                if DEBUG:
                    self.log('sharpest peak at t=%d chan=%d is locked out'
                             % (wave.ts[ti], self.chans[chani]))
                continue # skip to next peak

            if not (cutrange[0] <= wave.ts[ti] <= cutrange[1]):
                # use %r since wave.ts[ti] is np.int64 and %d gives TypeError if > 2**31:
                if DEBUG:
                    self.log("spike time %r falls outside cutrange for this searchblock "
                             "call, discarding" % wave.ts[ti])
                continue # skip to next peak

            # check that Vp threshold is exceeded by at least one of the two sharpest peaks
            adjpi = adjpeakis[maxcii, maxadjiis[maxcii]]
            # relative to t0i, not necessarily in temporal order:
            maxchantis = np.array([maxsharpi, adjpi])
            # voltages of the two sharpest peaks, convert int16 to int64 to prevent overflow
            Vs = np.int64(window[maxcii, maxchantis])
            Vp = abs(Vs).max() # grab biggest peak
            if Vp < self.thresh[chani]:
                if DEBUG:
                    self.log('peak at t=%d chan=%d and its adjacent peak are both '
                             '< Vp=%f uV' % (wave.ts[ti], self.chans[chani], AD2uV(Vp)))
                continue # skip to next peak

            # check that the two sharpest peaks together exceed Vpp threshold:
            Vpp = abs(Vs[0] - Vs[1]) # Vs are of opposite sign, unless monophasic
            if Vpp == 0: # monophasic spike
                Vpp = Vp # use Vp as Vpp
            if Vpp < self.ppthresh[chani]:
                if DEBUG:
                    self.log('peaks at t=%r chan=%d are < Vpp = %f'
                             % (wave.ts[[ti, t0i+adjpi]], self.chans[chani], AD2uV(Vpp)))
                continue # skip to next peak

            if DEBUG:
                self.log('found biggest thresh exceeding ppsharp at t=%d chan=%d'
                         % (wave.ts[ti], self.chans[chani]))

            # get new spatiotemporal neighbourhood, with full window,
            # align to -ve of the two sharpest peaks
            aligni = localsharp[maxcii, maxchantis].argmin()
            #oldti = ti # save
            ti = t0i + maxchantis[aligni] # new absolute time index to align to
            # cut new window
            oldt0i = t0i
            t0i = max(ti+twi[0], 0)
            t1i = min(ti+twi[1]+1, maxti) # end inclusive
            window = wave.data[chanis, t0i:t1i] # multichan data window, might not be contig
            maxcii, = np.where(chanis == chani)
            maxchantis += oldt0i - t0i # relative to new t0i
            tis = np.zeros((nchans, 2), dtype=int) # holds time indices for each lockchani
            tis[maxcii] = maxchantis

            # pick corresponding peaks on other chans according to how close they are
            # to those on maxchan, Don't consider the sign of the peaks on each
            # chan, just their proximity in time. In other words, allow for spike
            # inversion across space
            localsharp = sharp[chanis, t0i:t1i]
            peak0ti, peak1ti = maxchantis # primary and 2ndary peak tis of maxchan
            for cii in range(nchans):
                if cii == maxcii: # already set
                    continue
                localpeakis, = np.where(localsharp[cii] != 0.0)
                # keep only non-locked out localpeakis on this channel:
                localpeakis = localpeakis[(t0i+localpeakis) > lockouts[chanis[cii]]]
                if len(localpeakis) == 0: # localpeakis is empty
                    tis[cii] = maxchantis # use same tis as maxchan
                    continue
                lastpeakii = len(localpeakis) - 1
                # find peak on this chan that's temporally closest to primary peak on maxchan.
                # If two peaks are equally close, pick the sharpest one
                dt0is = abs(localpeakis-peak0ti)
                if (np.diff(dt0is) == 0).any(): # two peaks equally close, pick sharpest one
                    peak0ii = abs(localsharp[cii, localpeakis]).argmax()
                else:
                    peak0ii = dt0is.argmin()
                # save primary peak for this cii
                dt0i = dt0is[peak0ii]
                if dt0i > sdti: # too distant in time
                    tis[cii, 0] = peak0ti # use same t0i as maxchan
                else: # give it its own t0i
                    tis[cii, 0] = localpeakis[peak0ii]
                # save 2ndary peak for this cii
                if len(localpeakis) == 1: # monophasic, set 2ndary peak same as primary
                    tis[cii, 1] = tis[cii, 0]
                    continue
                if peak0ti <= peak1ti: # primary peak comes first (more common case)
                    peak1ii = min(peak0ii+1, lastpeakii) # 2ndary peak is 1 to the right
                else: # peak1ti < peak0ti, ie 2ndary peak comes first
                    peak1ii = max(peak0ii-1, 0) # 2ndary peak is 1 to the left
                dt1is = abs(localpeakis-peak1ti)
                dt1i = dt1is[peak1ii]
                if dt1i > sdti: # too distant in time
                    tis[cii, 1] = peak1ti # use same t1i as maxchan
                else:
                    tis[cii, 1] = localpeakis[peak1ii]

            # based on maxchan (chani), find inclchanis, incltis, and inclwindow:
            inclchanis = self.inclnbhdi[chani]
            ninclchans = len(inclchanis)
            inclchans = self.chans[inclchanis]
            chan = self.chans[chani]
            inclchani = int(np.where(inclchans == chan)[0]) # != chani!
            inclciis = chanis.searchsorted(inclchanis)
            incltis = tis[inclciis]
            inclwindow = window[inclciis]

            if DEBUG:
                self.log("final window params: t0=%r, t1=%r, Vs=%r, peakts=\n%r"
                         % (wave.ts[t0i], wave.ts[t1i], list(AD2uV(Vs)), wave.ts[t0i+tis]))

            if self.extractparamsondetect:
                # Get Vpp at each inclchan's tis, use as spatial weights:
                # see core.rowtake() or util.rowtake_cy() for indexing explanation:
                w = np.float32(inclwindow[np.arange(ninclchans)[:, None], incltis])
                w = abs(w).sum(axis=1)
                x = self.siteloc[inclchanis, 0] # 1D array (row)
                y = self.siteloc[inclchanis, 1]
                params = weights2f(f, w, x, y, inclchani)
                # NOTE(review): `params == None` should be `params is None` per PEP 8;
                # left unchanged here (doc-only edit)
                if params == None: # presumably a non-localizable many-channel noise event
                    if DEBUG:
                        treject = intround(wave.ts[ti]) # nearest us
                        self.log("reject spike at t=%d based on fit params" % treject)
                    # no real need to lockout chans for a params-rejected spike
                    continue # skip to next peak

            # build up spike record:
            s = spikes[nspikes]
            # wave.ts might be floats, depending on sampfreq
            s['t'] = intround(wave.ts[ti]) # nearest us
            # leave each spike's chanis in sorted order, as they are in self.inclnbhdi,
            # important assumption used later on, like in sort.get_wave() and
            # Neuron.update_wave()
            ts = wave.ts[t0i:t1i] # potentially floats
            # use ts = np.arange(s['t0'], s['t1'], stream.tres) to reconstruct
            s['t0'], s['t1'] = intround(wave.ts[t0i]), intround(wave.ts[t1i]) # nearest us
            s['tis'][:ninclchans] = incltis # wrt t0i=0
            s['aligni'] = aligni # 0 or 1
            s['dt'] = intround(abs(ts[tis[maxcii, 0]] - ts[tis[maxcii, 1]])) # nearest us
            s['V0'], s['V1'] = AD2uV(Vs) # in uV
            s['Vpp'] = AD2uV(Vpp) # in uV
            s['chan'], s['chans'][:ninclchans], s['nchans'] = chan, inclchans, ninclchans
            s['chani'] = inclchani
            nt = inclwindow.shape[1] # isn't always full width if recording has gaps
            wavedata[nspikes, :ninclchans, :nt] = inclwindow

            if self.extractparamsondetect:
                # Save spatial fit params, and lockout only the channels within lockrx*sx
                # of the fit spatial location of the spike, up to a max of self.inclr.
                s['x0'], s['y0'], s['sx'], s['sy'] = params
                x0, y0 = s['x0'], s['y0']
                # lockout radius for this spike:
                lockr = min(self.lockrx*s['sx'], self.inclr) # in um
                # test y coords of inclchans in y array, ylockchaniis can be used to index
                # into x, y and inclchans:
                ylockchaniis, = np.where(np.abs(y - y0) <= lockr) # convert bool arr to int
                # test Euclid distance from x0, y0 for each ylockchani:
                lockchaniis = ylockchaniis.copy()
                for ylockchanii in ylockchaniis:
                    if dist((x[ylockchanii], y[ylockchanii]), (x0, y0)) > lockr:
                        lockchaniis = np.delete(lockchaniis, ylockchanii) # dist is too great
                lockchans = inclchans[lockchaniis]
                lockchanis = inclchanis[lockchaniis]
                nlockchans = len(lockchans)
                s['lockchans'][:nlockchans], s['nlockchans'] = lockchans, nlockchans
                # just for testing:
                #assert (lockchanis == self.chans.searchsorted(lockchans)).all()
                #assert (lockchaniis == chanis.searchsorted(lockchanis)).all()
            else:
                # in this case, the inclchans and lockchans fields are redundant
                s['lockchans'][:ninclchans], s['nlockchans'] = inclchans, ninclchans
                lockchanis = chanis
                lockchaniis = np.arange(ninclchans)

            # give each chan a distinct lockout, based on how each chan's
            # sharpest peaks line up with those of the maxchan. Respect existing lockouts:
            # on each of the relevant chans, keep whichever lockout ends last
            thislockout = t0i+tis.max(axis=1)[lockchaniis]
            lockouts[lockchanis] = np.max([lockouts[lockchanis], thislockout], axis=0)

            if DEBUG:
                self.log('lockouts=%r\nfor chans=%r'
                         % (list(wave.ts[lockouts[lockchanis]]),
                            list(self.chans[lockchanis])))
                self.log('*** found new spike %d: t=%d chan=%d (%d, %d)'
                         % (nspikes+self.nspikes, s['t'], chan,
                            self.siteloc[chani, 0], self.siteloc[chani, 1]))
            nspikes += 1

        # trim spikes and wavedata arrays down to size
        spikes.resize(nspikes, refcheck=False)
        wds = wavedata.shape
        wavedata.resize((nspikes, wds[1], wds[2]), refcheck=False)
        return spikes, wavedata