def get_seps(nids, nd):
    """Build flattened array of distances between all unique pairs in nids,
    given neuron dict nd"""
    nn = len(nids)
    lti = np.tril_indices(nn, -1) # lower triangle (below diagonal) indices, ie unique pairs
    seps = []
    for nidi0, nidi1 in np.asarray(lti).T:
        sep = dist(nd[nids[nidi0]].pos, nd[nids[nidi1]].pos)
        seps.append(sep)
    seps = np.hstack(seps)
    return seps
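# Hedged usage sketch for get_seps(). Assumptions, not from the source: neuron
# objects only need a .pos attribute, and dist() is the codebase's Euclidean
# distance helper, stubbed out here so the sketch is self-contained:
import numpy as np

def dist(a, b):
    """Stand-in for the project's Euclidean distance helper."""
    return np.sqrt(((np.asarray(a) - np.asarray(b))**2).sum())

class _FakeNeuron:
    """Minimal stand-in for a neuron record, exposing only a position."""
    def __init__(self, pos):
        self.pos = pos

nd = {10: _FakeNeuron((0, 0)), 11: _FakeNeuron((0, 50)), 12: _FakeNeuron((30, 40))}
seps = get_seps([10, 11, 12], nd) # 3 neurons -> 3 unique pairs -> 3 distances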
xs = np.unique(track.chanpos[:, 0])
ys = np.unique(track.chanpos[:, 1])
dx = np.diff(xs)[0]
dy = np.diff(ys)[0]
w = max(xs) - min(xs) + dx
h = max(ys) - min(ys) + dy
xlims.append([min(xs) - dx/2, max(xs) + dx/2])
ylims.append([min(ys) - dy/2, max(ys) + dy/2])
areas.append(w * h) # in square um

N = 0
ds, sigmas = [], []
for track in tracks:
    neurons = list(track.alln.values())
    for neuron in neurons:
        d = [dist(neuron.pos, cp) for cp in neuron.sort.chanpos]
        ds.append(min(d))
        sigmas.append(neuron.sigma)
    N += len(neurons)
ds = np.hstack(ds)
sigmas = np.hstack(sigmas)

# calculate theoretical distribution, derived from concentric annuli:
rho = N / sum(areas) # average cell density per unit polytrode area (1/um^2)
midbins = edges[:-1] + intround(binw/2) # middle of each bin
theory = rho*2*pi*midbins/binw # normed distrib expected for randomly distributed units

# simulate numerically by randomly and uniformly distributing points around a polytrode and
# measuring the distribution of distances between each point and its nearest electrode site:
if MODEL: # can take a long time to run
    umap1a = ptc22.tr1.sort.chanpos
    area1a, xlims1a, ylims1a = areas[1], xlims[1], ylims[1]
    nseed = intround(rho * area1a)
    nrandom = 100000
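# The simulation body itself is not shown in this section. A minimal sketch of
# the model just described, under stated assumptions: nearest_site_dists() is a
# hypothetical helper name, and sitepos/xlim/ylim/npoints mirror the variables
# set up above (umap1a, xlims1a, ylims1a, nrandom):
def nearest_site_dists(sitepos, xlim, ylim, npoints):
    """Uniformly scatter npoints over the polytrode's bounding box and return
    each point's distance to its nearest electrode site."""
    pts = np.random.uniform((xlim[0], ylim[0]), (xlim[1], ylim[1]), (npoints, 2))
    # (npoints, nsites) pairwise point-to-site distances, then min over sites:
    d = np.sqrt(((pts[:, None, :] - sitepos[None, :, :])**2).sum(axis=2))
    return d.min(axis=1)

# e.g. modelds = nearest_site_dists(umap1a, xlims1a, ylims1a, nrandom)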
def check_wave(self, wave, cutrange):
    """Check which threshold-exceeding peaks in wave data look like spikes
    and return only events that fall within cutrange. Search local
    spatiotemporal window around each threshold-exceeding peak for biggest
    peak-to-peak sharpness. Finally, test that the sharpest peak and its
    neighbour exceed Vp and Vpp thresholds"""
    sort = self.sort
    AD2uV = sort.converter.AD2uV
    if self.extractparamsondetect:
        weights2f = sort.extractor.weights2spatial
        f = sort.extractor.f
    # holds time indices for each enabled chan until which each enabled chani is
    # locked out, updated on every found spike
    lockouts = np.zeros(self.nchans, dtype=np.int64)

    tsharp = time.time()
    sharp = util.sharpness2D(wave.data) # sharpness of all zero-crossing separated peaks
    info('%s: sharpness2D() took %.3f sec' % (ps().name, time.time()-tsharp))
    targthreshsharp = time.time()
    # threshold-exceeding peak indices (2D, columns are [tis, cis])
    peakis = util.argthreshsharp(wave.data, self.thresh, sharp)
    info('%s: argthreshsharp() took %.3f sec' % (ps().name, time.time()-targthreshsharp))

    maxti = len(wave.ts) - 1
    dti = self.dti
    twi = sort.twi
    sdti = dti // 2 # spatial dti: max dti allowed between maxchan and all other chans
    nspikes = 0
    npeaks = len(peakis)
    spikes = np.zeros(npeaks, self.SPIKEDTYPE) # nspikes will always be <= npeaks
    ## TODO: test whether np.empty or np.zeros is faster overall in this case
    wavedata = np.empty((npeaks, self.maxnchansperspike, self.maxnt), dtype=np.int16)
    # check each threshold-exceeding peak for validity:
    for peaki, (ti, chani) in enumerate(peakis):
        if DEBUG: self.log('*** trying thresh peak at t=%r chan=%d'
                           % (wave.ts[ti], self.chans[chani]))

        # is this threshold-exceeding peak locked out?
        tlockoutchani = lockouts[chani]
        if ti <= tlockoutchani:
            if DEBUG: self.log('peak is locked out')
            continue # skip to next peak

        # find all enabled chanis within inclnbh of chani, lockouts are checked later:
        chanis = self.inclnbhdi[chani]
        nchans = len(chanis)

        # get search window DT on either side of this peak, for checking sharpness
        t0i = max(ti-dti, 0) # check for lockouts a bit later
        t1i = ti+dti+1 # +1 makes it end inclusive, don't worry about slicing past end
        window = wave.data[chanis, t0i:t1i] # search window, might not be contig
        if DEBUG: self.log('searching window (%d, %d) on chans=%r'
                           % (wave.ts[t0i], wave.ts[t1i], list(self.chans[chanis])))

        # Collect peak-to-peak sharpness for all chans. Save max and adjacent sharpness
        # timepoints for each chan, and keep track of which of the two adjacent
        # non-locked-out peaks is the sharpest. Note that the localsharp array contains
        # sharpness of all local peaks, not just those that exceed threshold, as in the
        # peakis array.
        localsharp = sharp[chanis, t0i:t1i] # sliced the same way as window
        ppsharp = np.zeros(nchans, dtype=np.float32)
        maxsharpis = np.zeros(nchans, dtype=int)
        adjpeakis = np.zeros((nchans, 2), dtype=int)
        maxadjiis = np.zeros(nchans, dtype=int)
        continuepeaki = False # signal to skip to next peaki
        for cii in range(nchans):
            localpeakis, = np.where(localsharp[cii] != 0.0)
            # keep only non-locked out localpeakis on this channel:
            localpeakis = localpeakis[(t0i+localpeakis) > lockouts[chanis[cii]]]
            if len(localpeakis) == 0:
                continue # localpeakis is empty
            lastpeakii = len(localpeakis) - 1
            maxsharpii = abs(localsharp[cii, localpeakis]).argmax()
            maxsharpi = localpeakis[maxsharpii]
            maxsharpis[cii] = maxsharpi
            # Get one adjacent peak to left and right each. Due to limits, either or
            # both may be identical to the max sharpness peak
            adjpeakis[cii] = localpeakis[[max(maxsharpii-1, 0),
                                          min(maxsharpii+1, lastpeakii)]]
            if localsharp[cii, maxsharpi] < 0:
                maxadjii = localsharp[cii, adjpeakis[cii]].argmax() # look for +ve adj peak
            else:
                maxadjii = localsharp[cii, adjpeakis[cii]].argmin() # look for -ve adj peak
            maxadjiis[cii] = maxadjii # save
            adjpi = adjpeakis[cii, maxadjii]
            if maxsharpi != adjpi:
                ppsharp[cii] = localsharp[cii, maxsharpi] - localsharp[cii, adjpi]
            else: # monophasic spike, set ppsharp == sharpness of single peak:
                ppsharp[cii] = localsharp[cii, maxsharpi]
                if chanis[cii] == chani: # trigger chan is monophasic
                    # ensure ppsharp of monophasic spike >= Vppthresh**2/dt, ie ensure that
                    # its Vpp exceeds Vppthresh and has zero crossings on either side,
                    # with no more than dt between. Avoids excessively wide
                    # monophasic peaks from being considered as spikes:
                    if DEBUG: self.log("found monophasic spike")
                    if abs(ppsharp[cii]) < self.ppthresh[chani]**2 / dti:
                        continuepeaki = True
                        if DEBUG: self.log("peak wasn't sharp enough for a monophasic "
                                           "spike")
                        break # out of cii loop

        if continuepeaki:
            continue # skip to next peak

        # Choose chan with biggest ppsharp as maxchan and its sharpest peak as the primary
        # peak, check that these new chani and ti values are identical to the trigger
        # values in peakis, that the peak at [chani, ti] isn't locked out, that it falls
        # within cutrange, and that it meets both Vp and Vpp threshold criteria.
        oldchani, oldti = chani, ti # save
        maxcii = abs(ppsharp).argmax() # choose chan with sharpest peak as new maxchan
        chani = chanis[maxcii] # update maxchan
        maxsharpi = maxsharpis[maxcii] # choose sharpest peak of maxchan, absolute
        ti = t0i + maxsharpi # update ti

        # Search forward through peakis for a future (later) row that matches the
        # (potentially new) [chani, ti] calculated above based on sharpness of local
        # peaks. If that particular tuple is indeed coming up, it is therefore
        # thresh exceeding, and should be waited for. If not, don't wait for it. Something
        # that was thresh exceeding caused the trigger, but this nearby [chani, ti] tuple
        # is according to the sharpness measure the best estimate of the spatiotemporal
        # origin of the trigger-causing event.
        newpeak_coming_up = (peakis[peaki+1:] == [ti, chani]).prod(axis=1).any()
        if chani != oldchani:
            if newpeak_coming_up:
                if DEBUG: self.log("triggered off peak on chan that isn't max "
                                   "ppsharpness for this event, pass on this peak and "
                                   "wait for the true sharpest peak to come later")
                continue # skip to next peak
            else:
                # update all variables that depend on chani that wouldn't otherwise be
                # updated:
                tlockoutchani = lockouts[chani]
                chanis = self.inclnbhdi[chani]
                nchans = len(chanis)

        if ti > oldti:
            if newpeak_coming_up:
                if DEBUG: self.log("triggered off early adjacent peak for this event, "
                                   "pass on this peak and wait for the true sharpest "
                                   "peak to come later")
                continue # skip to next peak
            else:
                # unlike chani, it seems there are no variables that depend on ti that
                # wouldn't otherwise be updated:
                pass

        if ti <= tlockoutchani: # sharpest peak is locked out
            if DEBUG: self.log('sharpest peak at t=%d chan=%d is locked out'
                               % (wave.ts[ti], self.chans[chani]))
            continue # skip to next peak

        if not (cutrange[0] <= wave.ts[ti] <= cutrange[1]):
            # use %r since wave.ts[ti] is np.int64 and %d gives TypeError if > 2**31:
            if DEBUG: self.log("spike time %r falls outside cutrange for this "
                               "searchblock call, discarding" % wave.ts[ti])
            continue # skip to next peak

        # check that Vp threshold is exceeded by at least one of the two sharpest peaks
        adjpi = adjpeakis[maxcii, maxadjiis[maxcii]]
        # relative to t0i, not necessarily in temporal order:
        maxchantis = np.array([maxsharpi, adjpi])
        # voltages of the two sharpest peaks, convert int16 to int64 to prevent overflow
        Vs = np.int64(window[maxcii, maxchantis])
        Vp = abs(Vs).max() # grab biggest peak
        if Vp < self.thresh[chani]:
            if DEBUG: self.log('peak at t=%d chan=%d and its adjacent peak are both '
                               '< Vp=%f uV' % (wave.ts[ti], self.chans[chani], AD2uV(Vp)))
            continue # skip to next peak

        # check that the two sharpest peaks together exceed Vpp threshold:
        Vpp = abs(Vs[0] - Vs[1]) # Vs are of opposite sign, unless monophasic
        if Vpp == 0: # monophasic spike
            Vpp = Vp # use Vp as Vpp
        if Vpp < self.ppthresh[chani]:
            if DEBUG: self.log('peaks at t=%r chan=%d are < Vpp = %f'
                               % (wave.ts[[ti, t0i+adjpi]], self.chans[chani],
                                  AD2uV(Vpp)))
            continue # skip to next peak

        if DEBUG: self.log('found biggest thresh exceeding ppsharp at t=%d chan=%d'
                           % (wave.ts[ti], self.chans[chani]))

        # get new spatiotemporal neighbourhood, with full window,
        # align to -ve of the two sharpest peaks
        aligni = localsharp[maxcii, maxchantis].argmin()
        #oldti = ti # save
        ti = t0i + maxchantis[aligni] # new absolute time index to align to
        # cut new window
        oldt0i = t0i
        t0i = max(ti+twi[0], 0)
        t1i = min(ti+twi[1]+1, maxti) # end inclusive
        window = wave.data[chanis, t0i:t1i] # multichan data window, might not be contig
        maxcii, = np.where(chanis == chani)
        maxchantis += oldt0i - t0i # relative to new t0i
        tis = np.zeros((nchans, 2), dtype=int) # holds time indices for each lockchani
        tis[maxcii] = maxchantis

        # Pick corresponding peaks on other chans according to how close they are
        # to those on maxchan. Don't consider the sign of the peaks on each
        # chan, just their proximity in time. In other words, allow for spike
        # inversion across space.
        localsharp = sharp[chanis, t0i:t1i]
        peak0ti, peak1ti = maxchantis # primary and 2ndary peak tis of maxchan
        for cii in range(nchans):
            if cii == maxcii: # already set
                continue
            localpeakis, = np.where(localsharp[cii] != 0.0)
            # keep only non-locked out localpeakis on this channel:
            localpeakis = localpeakis[(t0i+localpeakis) > lockouts[chanis[cii]]]
            if len(localpeakis) == 0: # localpeakis is empty
                tis[cii] = maxchantis # use same tis as maxchan
                continue
            lastpeakii = len(localpeakis) - 1
            # find peak on this chan that's temporally closest to primary peak on
            # maxchan. If two peaks are equally close, pick the sharpest one
            dt0is = abs(localpeakis-peak0ti)
            if (np.diff(dt0is) == 0).any(): # two peaks equally close, pick sharpest one
                peak0ii = abs(localsharp[cii, localpeakis]).argmax()
            else:
                peak0ii = dt0is.argmin()
            # save primary peak for this cii
            dt0i = dt0is[peak0ii]
            if dt0i > sdti: # too distant in time
                tis[cii, 0] = peak0ti # use same t0i as maxchan
            else: # give it its own t0i
                tis[cii, 0] = localpeakis[peak0ii]
            # save 2ndary peak for this cii
            if len(localpeakis) == 1: # monophasic, set 2ndary peak same as primary
                tis[cii, 1] = tis[cii, 0]
                continue
            if peak0ti <= peak1ti: # primary peak comes first (more common case)
                peak1ii = min(peak0ii+1, lastpeakii) # 2ndary peak is 1 to the right
            else: # peak1ti < peak0ti, ie 2ndary peak comes first
                peak1ii = max(peak0ii-1, 0) # 2ndary peak is 1 to the left
            dt1is = abs(localpeakis-peak1ti)
            dt1i = dt1is[peak1ii]
            if dt1i > sdti: # too distant in time
                tis[cii, 1] = peak1ti # use same t1i as maxchan
            else:
                tis[cii, 1] = localpeakis[peak1ii]

        # based on maxchan (chani), find inclchanis, incltis, and inclwindow:
        inclchanis = self.inclnbhdi[chani]
        ninclchans = len(inclchanis)
        inclchans = self.chans[inclchanis]
        chan = self.chans[chani]
        inclchani = int(np.where(inclchans == chan)[0]) # != chani!
        inclciis = chanis.searchsorted(inclchanis)
        incltis = tis[inclciis]
        inclwindow = window[inclciis]

        if DEBUG: self.log("final window params: t0=%r, t1=%r, Vs=%r, peakts=\n%r"
                           % (wave.ts[t0i], wave.ts[t1i], list(AD2uV(Vs)),
                              wave.ts[t0i+tis]))

        if self.extractparamsondetect:
            # get Vpp at each inclchan's tis, use as spatial weights:
            # see core.rowtake() or util.rowtake_cy() for indexing explanation:
            w = np.float32(inclwindow[np.arange(ninclchans)[:, None], incltis])
            w = abs(w).sum(axis=1)
            x = self.siteloc[inclchanis, 0] # 1D array (row)
            y = self.siteloc[inclchanis, 1]
            params = weights2f(f, w, x, y, inclchani)
            if params is None: # presumably a non-localizable many-channel noise event
                if DEBUG:
                    treject = intround(wave.ts[ti]) # nearest us
                    self.log("reject spike at t=%d based on fit params" % treject)
                # no real need to lockout chans for a params-rejected spike
                continue # skip to next peak

        # build up spike record:
        s = spikes[nspikes]
        # wave.ts might be floats, depending on sampfreq
        s['t'] = intround(wave.ts[ti]) # nearest us
        # leave each spike's chanis in sorted order, as they are in self.inclnbhdi,
        # important assumption used later on, like in sort.get_wave() and
        # Neuron.update_wave()
        ts = wave.ts[t0i:t1i] # potentially floats
        # use ts = np.arange(s['t0'], s['t1'], stream.tres) to reconstruct
        s['t0'], s['t1'] = intround(wave.ts[t0i]), intround(wave.ts[t1i]) # nearest us
        s['tis'][:ninclchans] = incltis # wrt t0i=0
        s['aligni'] = aligni # 0 or 1
        s['dt'] = intround(abs(ts[tis[maxcii, 0]] - ts[tis[maxcii, 1]])) # nearest us
        s['V0'], s['V1'] = AD2uV(Vs) # in uV
        s['Vpp'] = AD2uV(Vpp) # in uV
        s['chan'], s['chans'][:ninclchans], s['nchans'] = chan, inclchans, ninclchans
        s['chani'] = inclchani
        nt = inclwindow.shape[1] # isn't always full width if recording has gaps
        wavedata[nspikes, :ninclchans, :nt] = inclwindow

        if self.extractparamsondetect:
            # save spatial fit params, and lockout only the channels within lockrx*sx
            # of the fit spatial location of the spike, up to a max of self.inclr:
            s['x0'], s['y0'], s['sx'], s['sy'] = params
            x0, y0 = s['x0'], s['y0']
            # lockout radius for this spike:
            lockr = min(self.lockrx*s['sx'], self.inclr) # in um
            # test y coords of inclchans in y array, ylockchaniis can be used to index
            # into x, y and inclchans:
            ylockchaniis, = np.where(np.abs(y - y0) <= lockr) # convert bool arr to int
            # test Euclid distance from x0, y0 for each ylockchani:
            lockchaniis = ylockchaniis.copy()
            for ylockchanii in ylockchaniis:
                if dist((x[ylockchanii], y[ylockchanii]), (x0, y0)) > lockr:
                    lockchaniis = np.delete(lockchaniis, ylockchanii) # dist is too great
            lockchans = inclchans[lockchaniis]
            lockchanis = inclchanis[lockchaniis]
            nlockchans = len(lockchans)
            s['lockchans'][:nlockchans], s['nlockchans'] = lockchans, nlockchans
            # just for testing:
            #assert (lockchanis == self.chans.searchsorted(lockchans)).all()
            #assert (lockchaniis == chanis.searchsorted(lockchanis)).all()
        else: # in this case, the inclchans and lockchans fields are redundant
            s['lockchans'][:ninclchans], s['nlockchans'] = inclchans, ninclchans
            lockchanis = chanis
            lockchaniis = np.arange(ninclchans)

        # Give each chan a distinct lockout, based on how each chan's sharpest peaks
        # line up with those of the maxchan. Respect existing lockouts: on each of
        # the relevant chans, keep whichever lockout ends last
        thislockout = t0i+tis.max(axis=1)[lockchaniis]
        lockouts[lockchanis] = np.max([lockouts[lockchanis], thislockout], axis=0)

        if DEBUG:
            self.log('lockouts=%r\nfor chans=%r'
                     % (list(wave.ts[lockouts[lockchanis]]),
                        list(self.chans[lockchanis])))
            self.log('*** found new spike %d: t=%d chan=%d (%d, %d)'
                     % (nspikes+self.nspikes, s['t'], chan,
                        self.siteloc[chani, 0], self.siteloc[chani, 1]))

        nspikes += 1

    # trim spikes and wavedata arrays down to size:
    spikes.resize(nspikes, refcheck=False)
    wds = wavedata.shape
    wavedata.resize((nspikes, wds[1], wds[2]), refcheck=False)
    return spikes, wavedata
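# util.sharpness2D() and util.argthreshsharp() are implemented elsewhere in the
# util extension module. Purely as an illustrative sketch of the "sharpness of
# zero-crossing-separated peaks" idea used above -- an assumption, not the
# actual algorithm -- a 1D version might look like:
import numpy as np

def sharpness1D(x):
    """Return an array like x that is zero everywhere except at the extremum
    of each zero-crossing-separated phase, where it holds a signed sharpness
    value (here simply amplitude / phase width)."""
    x = np.asarray(x, dtype=float)
    out = np.zeros_like(x)
    sign = np.signbit(x).astype(int)
    bounds = np.where(np.diff(sign) != 0)[0] + 1 # indices where the sign flips
    for seg in np.split(np.arange(len(x)), bounds): # one segment per phase
        i = seg[np.abs(x[seg]).argmax()] # extremum of this phase
        out[i] = x[i] / len(seg) # keeps the extremum's sign
    return out

s = sharpness1D([0.1, 1.0, 0.3, -0.2, -2.0, -0.1])
pps = s[1] - s[4] # peak-to-peak sharpness across adjacent phases, cf. ppsharp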
from numpy.linalg import inv
import core # project-local module providing dist()

def Hom_loss(H, p, p_):
    """Symmetric transfer error of homography H for the correspondence
    (p, p_): project p forward with H and compare against p_, project p_
    backward with inv(H) and compare against p, then sum the two distances."""
    Hp_ = H @ p       # forward projection, estimate of p_
    Hp = inv(H) @ p_  # backward projection, estimate of p
    return core.dist(Hp_, p_) + core.dist(Hp, p)
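# Hedged usage sketch (assumptions, not from the source: points are
# homogeneous 3-vectors, core.dist() is Euclidean distance, and this H keeps
# the last coordinate at 1 so no dehomogenization is needed; for a general H
# you would divide by the last coordinate before measuring distance):
import numpy as np

H = np.array([[1., 0., 2.],
              [0., 1., -1.],
              [0., 0., 1.]]) # pure-translation homography
p = np.array([3., 4., 1.])
p_ = H @ p # a perfect correspondence
print(Hom_loss(H, p, p_)) # -> 0.0 up to floating-point error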