def pitjb_online_parser(v, p=None):
    if p is None:
        p = get_default_params()
    v_peak = p.online_vthresh
    peaks, ends = [], []
    pe = 0, 0  # (peaktime, sacendtime) tuple which we'll use
    # XXX: it's slow to iterate over the velocity like this, but it's the
    # easiest way to have code that does the same thing as what our
    # online-parser currently does
    for i in range(len(v)):
        if i == pe[1] - p.framelag:
            ends.append(pe[1])
            peaks.append(pe[0])
        if v[i] > p.online_vthresh:
            #print "Got higher than v_thresh"
            if v[i] > v_peak:
                v_peak = v[i]
                peaktime = i
                sacendtime = peaktime + timeleft_from_vpeak(
                    v_peak, p.sacslope, p.sacintercept)
                sacendtime = int(sacendtime)
                pe = peaktime, sacendtime
                # communicate the predicted fixation position to visionegg
                # display loop; just put a random position for now
                #fix_position[:] = np.random.rand() * 800, np.random.rand() * 600
                #fix_position[:] = rbuf[i-1]
                #print "updating v_peak", v[i], " \t curtime:", i, "end time: ", sacendtime
                #peaks.append(i)
        else:
            # we're slower than threshold again, must be in a fixation epoch
            v_peak = p.online_vthresh
    return np.array(peaks), np.array(ends)
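# Both parsers call timeleft_from_vpeak, which lives in Dimstim.multipylink and
# is not shown in this file. Judging from the sacslope / sacintercept parameter
# names, a minimal stand-in for offline testing could look like the sketch
# below. It assumes the remaining saccade duration is a linear function of the
# peak velocity; the real Dimstim implementation may differ, and this only
# mirrors the call signature used above.
def _timeleft_from_vpeak_sketch(v_peak, slope, intercept):
    # predicted number of samples left until the saccade ends, for a scalar
    # peak velocity or an array of peak velocities
    return slope * v_peak + intercept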
def pitjb_online_parser_fast(v, p=None):
    """Online parser which extracts saccades based on peak velocity that is
    above some threshold.

    v : array
        velocity in *pixels* per sample

    XXX: not working at the moment
    """
    from Dimstim.multipylink import timeleft_from_vpeak
    if p is None:
        p = get_default_params()
    v_over_thresh, = np.where(v > p.online_vthresh)
    tleft = timeleft_from_vpeak(v[v_over_thresh],
                                p.sacslope, p.sacintercept).astype(int)
    ends = v_over_thresh + tleft
    # is the sample larger than all other velocities before the predicted end
    # time?
    # XXX: this needs to be modified by our framelag / framerate, see
    # Dimstim.FixBarsAsync for details, in particular lines that include
    # either "ttl < 10 and ttl > 4" or "ttl < 5 and ttl >= 0"
    # this won't work - we'd get a ragged array depending on when each
    # predicted saccade ends
    #mask = v[v_over_thresh] > v[v_over_thresh+np.arange(ends)].all()
    mask = [(v[b] > v[b + 1:e]).all() for b, e in zip(v_over_thresh, ends)]
    mask = np.array(mask)
    # if the mask condition is false, that means we would have avoided this
    # sample, but if it's true, we still may have avoided this sample, if
    # there's a sample before it that caused us to make a prediction further
    # into the past.
    print(mask)
    print(v_over_thresh)
    if mask.sum() > 1:
        # now every peak that's at mask=True is the maximum until the end of
        # its predicted saccade. If there are any other peaks before this end,
        # we will ignore them (unless they occur after we've gone below
        # threshold), in which case we'll re-predict a new saccade. How do we
        # account for this in the code? what a headache!
        mask2 = ends[mask][:-1] < v_over_thresh[mask][1:]
        mask[1:][~mask2] = False
        print(mask2)
    return v_over_thresh[mask]
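# Rough usage sketch for the reference parser above, exercised on a synthetic
# velocity trace. It assumes the module-level names used earlier
# (get_default_params, timeleft_from_vpeak, np) are available, and the fake
# saccade burst below is purely illustrative.
if __name__ == '__main__':
    p = get_default_params()
    v = np.zeros(1000)
    # a single made-up velocity burst that clearly exceeds online_vthresh
    v[200:240] = np.hanning(40) * (3 * p.online_vthresh)
    peaks, ends = pitjb_online_parser(v, p)
    print("detected peak samples:", peaks)
    print("predicted saccade end samples:", ends)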