def click_estimate_amplitudes(self):
    """Fill in the peak magnitudes for the current trace from its stored peak latencies.

    Reads the preprocessed trace, looks up the voltage at the annotated
    negative/positive peak latencies, and writes the magnitudes back into the
    trace attributes. Identical (or both missing) latencies are treated as an
    MEP-negative trial and all four peak attributes are zeroed.
    """
    tix = self.trace_idx
    attrs = self.cf.get_trace_attrs(tix)
    trace = process_data(self.cf.get_trace_data(tix), attrs, key="_log")
    fs = decode(attrs["samplingrate"])

    def _latency_in_samples(key):
        # latency stored in ms; a missing value counts as 0
        return int((decode(attrs[key]) or 0) * fs / 1000)

    neg = _latency_in_samples("neg_peak_latency_ms")
    pos = _latency_in_samples("pos_peak_latency_ms")

    if neg == pos:
        # MEP-negative trial (or degenerate annotation): zero everything out
        print("MEP negative or identical latencies", neg, pos)
        for key in (
            "neg_peak_latency_ms",
            "pos_peak_latency_ms",
            "neg_peak_magnitude_uv",
            "pos_peak_magnitude_uv",
        ):
            attrs[key] = encode(0)
    else:
        pre = decode(attrs["samples_pre_event"])
        shift = decode(attrs["onset_shift"]) or 0
        # index into the trace relative to the (shifted) event onset
        namp = float(trace[neg + pre + shift])
        pamp = float(trace[pos + pre + shift])
        print("Estimating amplitudes to be", namp, pamp)
        attrs["neg_peak_magnitude_uv"] = encode(namp)
        attrs["pos_peak_magnitude_uv"] = encode(pamp)

    self.cf.set_trace_attrs(tix, attrs)
    self.draw_hasmep_button()
    self.callback()
def click_estimate_parameters(self):
    """Automatically estimate peak latencies and magnitudes for the current trace.

    Searches a fixed post-onset window for the most negative and most positive
    samples of the preprocessed trace, then stores their latencies (in ms,
    relative to onset) and magnitudes back into the trace attributes.
    """
    window = (15, 120)  # search window after onset, in ms
    idx = self.trace_idx
    data = self.cf.get_trace_data(idx)
    tattrs = self.cf.get_trace_attrs(idx)
    data = process_data(data, tattrs, key="_log")
    pre = decode(tattrs["samples_pre_event"])
    shift = decode(tattrs["onset_shift"]) or 0
    fs = decode(tattrs["samplingrate"])
    # NOTE(review): onset subtracts the shift here, while click_estimate_amplitudes
    # adds it when indexing — confirm the sign convention is intentional.
    onset = pre - shift
    print(shift, fs)
    minlat = int(window[0] * fs / 1000)
    maxlat = int(window[1] * fs / 1000)
    a = onset + minlat
    b = onset + maxlat
    mep = data[a:b]
    # Fix: compute argmin/argmax once and reuse the indices for both the
    # magnitudes and the latencies (the original called each reduction twice).
    nidx = mep.argmin()
    pidx = mep.argmax()
    namp = float(mep[nidx])
    pamp = float(mep[pidx])
    # sample index within the window -> latency in ms
    nlat = nidx * 1000 / fs
    plat = pidx * 1000 / fs
    print("Estimating latencies to be", nlat, plat)
    print("Estimating amplitudes to be", namp, pamp)
    # latencies are stored relative to onset, so add the window start back on
    tattrs["neg_peak_latency_ms"] = encode(float(nlat + window[0]))
    tattrs["neg_peak_magnitude_uv"] = encode(namp)
    tattrs["pos_peak_latency_ms"] = encode(float(plat + window[0]))
    tattrs["pos_peak_magnitude_uv"] = encode(pamp)
    self.cf.set_trace_attrs(idx, tattrs)
    self.draw_hasmep_button()
    self.callback()
def cli_peek(args: argparse.Namespace):
    """Print a summary of a CacheFile and optionally warn about similar traces.

    Groups the preprocessed traces by their ``id`` attribute. If
    ``args.similarity`` is given, computes pairwise Pearson correlations within
    each group of id-sharing traces and prints a warning for every pair whose
    absolute correlation exceeds the threshold.

    :param args: parsed CLI arguments; uses ``fname`` (cache file path) and
        ``similarity`` (correlation threshold or None).
    """
    from offspect.api import CacheFile
    from offspect.cache.steps import process_data
    from collections import defaultdict
    from itertools import combinations

    import numpy as np

    cf = CacheFile(args.fname)
    print(cf)
    # group (preprocessed trace, position) by trace id
    D: defaultdict = defaultdict(list)
    for ix, (data, attrs) in enumerate(cf):
        data = process_data(data, attrs, verbose=False)
        D[attrs["id"]].append((data, ix))
    if args.similarity is not None:
        for key, values in D.items():
            if len(values) <= 1:
                continue
            traces = [v[0] for v in values]
            idx = [v[1] + 1 for v in values]  # 1-based trace numbers for output
            with np.errstate(invalid="ignore"):  # NaN traces yield NaN coeffs silently
                r = np.corrcoef(np.asanyarray(traces))
                # every unordered pair (col < row), replacing the nested
                # row/col loop with its row > col guard
                for col, row in combinations(range(len(traces)), 2):
                    coeff = r[row, col]
                    if abs(coeff) > args.similarity:
                        print(
                            f"WARNING: traces [{idx[col]}, {idx[row]}] share ID {key} and are similar with r = {coeff:3.2f}"
                        )
def plot_trace(self, cf, idx: int = 0):
    """Render trace ``idx`` of ``cf`` onto the canvas with its annotated peaks.

    Decodes timing and peak attributes, preprocesses the trace data, and
    delegates drawing to ``plot_trace_on``. Any exception during preprocessing
    or drawing is caught and printed so the GUI keeps running.
    """
    data = cf.get_trace_data(idx)
    attrs = cf.get_trace_attrs(idx)
    pre = decode(attrs["samples_pre_event"])
    post = decode(attrs["samples_post_event"])
    fs = decode(attrs["samplingrate"])
    shift = decode(attrs["onset_shift"]) or 0
    # time-axis limits in seconds, relative to the event
    t0, t1 = -float(pre) / float(fs), float(post) / float(fs)
    # peak annotations; the 0.0/1.0 fallbacks keep the latencies distinct when unset
    nlat = decode(attrs["neg_peak_latency_ms"]) or 0.0
    plat = decode(attrs["pos_peak_latency_ms"]) or 1.0
    namp = decode(attrs["neg_peak_magnitude_uv"]) or 0.0
    pamp = decode(attrs["pos_peak_magnitude_uv"]) or 0.0
    # convert latencies from ms to sample counts
    nlat = int(nlat * float(fs) / 1000)
    plat = int(plat * float(fs) / 1000)
    # order the peak pairs by ascending latency
    if nlat < plat:
        lats, amps = (nlat, plat), (namp, pamp)
    else:
        lats, amps = (plat, nlat), (pamp, namp)
    try:
        # apply the logged preprocessing steps before plotting
        data = process_data(data, attrs, key="_log")
        plot_trace_on(self.canvas.axes, data, t0, t1, pre, post, lats, amps, shift)
        print(f"PLOT: Plotting trace number {idx+1} shifted by {shift} samples")
    except Exception as e:
        print(e)
def save_tracedata(self):
    """Apply all logged preprocessing steps to the current trace and persist it.

    Writes the processed data back into the CacheFile, clears the
    preprocessing log (the steps are now baked into the stored data), and
    refreshes the view.
    """
    idx = self.ctrl.trace_idx
    attrs = self.cf.get_trace_attrs(idx)
    processed = process_data(self.cf.get_trace_data(idx), attrs, key="_log")
    write_tracedata(self.cf, processed, idx)
    # steps are now applied in-place, so reset the log to empty
    attrs["_log"] = encode([])
    self.cf.set_trace_attrs(idx, attrs)
    self.refresh()
    print(
        "APPLY: Applied all preprocessing steps and wrote them into the CacheFile for trace#",
        idx,
    )