def cut_traces(xdffile: FileName, annotation: Annotations) -> List[TraceData]:
    """cut the tracedata from an xdffile given Annotations

    args
    ----
    xdffile: FileName
        the xdffile for cutting the data. must correspond in name to the one
        specified in the annotation
    annotation: Annotations
        the annotations specifying e.g. onsets as well as pre and post durations

    returns
    -------
    traces: List[TraceData]
    """
    streams = XDFFile(xdffile)
    channel = decode(annotation["attrs"]["channel_of_interest"])
    print("Selecting traces for channel", channel)
    datastream = pick_stream_with_channel(channel, streams)
    cix = datastream.channel_labels.index(channel)

    pre = decode(annotation["attrs"]["samples_pre_event"])
    post = decode(annotation["attrs"]["samples_post_event"])
    traces = []
    for attrs in annotation["traces"]:
        onset = decode(attrs["event_sample"])
        # cut the samples around the event and subtract the mean of the
        # pre-stimulus period as baseline
        trace = datastream.time_series[onset - pre : onset + post, cix]
        bl = trace[0:pre].mean()
        trace -= bl
        traces.append(trace)
    return traces

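# Hypothetical usage sketch (the file name is an assumption, not from the
# source): cut baseline-corrected traces for the channel of interest and
# confirm each one spans the annotated pre plus post samples.
def _example_cut_xdf_traces(annotation: Annotations) -> List[TraceData]:
    traces = cut_traces("recording.xdf", annotation)
    pre = decode(annotation["attrs"]["samples_pre_event"])
    post = decode(annotation["attrs"]["samples_post_event"])
    assert all(len(trace) == pre + post for trace in traces)
    return traces
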
def cut_traces(xdffile: FileName, annotation: Annotations) -> List[TraceData]:
    """cut the tracedata from an xdffile given Annotations

    args
    ----
    xdffile: FileName
        the xdffile for cutting the data. must correspond in name to the one
        specified in the annotation
    annotation: Annotations
        the annotations specifying e.g. onsets as well as pre and post durations

    returns
    -------
    traces: List[TraceData]
    """
    streams = XDFFile(xdffile)
    soi = decode(annotation["attrs"]["channel_of_interest"])
    print("Selecting traces for stream", soi)
    datastream = streams[soi]

    pre = decode(annotation["attrs"]["samples_pre_event"])
    post = decode(annotation["attrs"]["samples_post_event"])
    traces = []
    for attrs in annotation["traces"]:
        onset = decode(attrs["event_sample"])
        # keep all channels of the selected stream for this trace
        trace = datastream.time_series[onset - pre : onset + post, :]
        traces.append(trace)
    return traces

def cut_traces(matfile: FileName, annotation: Annotations) -> List[TraceData]:
    """cut the tracedata from a matfile given Annotations

    args
    ----
    matfile: FileName
        the original matfile. must correspond in name to the one specified
        in the annotation
    annotation: Annotations
        the annotations specifying e.g. onsets as well as pre and post durations

    returns
    -------
    traces: List[TraceData]
    """
    if Path(matfile).name != annotation["origin"]:
        raise ValueError(
            "Matfile does not correspond with the original file. Fix manually if you plan to fork these annotations"
        )
    content = convert_mat(matfile)
    # it is important to decode all values, because they are natively stored
    # as strings only
    target_channel = decode(annotation["attrs"]["channel_of_interest"])
    pre = decode(annotation["attrs"]["samples_pre_event"])
    post = decode(annotation["attrs"]["samples_post_event"])
    onsets = [decode(attr["event_sample"]) for attr in annotation["traces"]]
    traces = _cut_traces(
        content,
        target_channel=target_channel,
        pre_in_samples=pre,
        post_in_samples=post,
        onsets=onsets,
    )
    return traces

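# Hypothetical usage sketch (assumes the original matfile sits in the current
# working directory under the name recorded in the annotation): the call above
# raises a ValueError if the file name does not match annotation["origin"].
def _example_cut_mat_traces(annotation: Annotations) -> List[TraceData]:
    matfile = annotation["origin"]
    return cut_traces(matfile, annotation)
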
def cut_traces(cntfile: FileName, annotation: Annotations) -> List[TraceData]:
    """cut the tracedata from a cntfile given Annotations

    args
    ----
    cntfile: FileName
        the cntfile for cutting the data. must correspond in name to the one
        specified in the annotation
    annotation: Annotations
        the annotations specifying e.g. onsets as well as pre and post durations

    returns
    -------
    traces: List[TraceData]
    """
    cnt = cnt_file(cntfile)
    pre = decode(annotation["attrs"]["samples_pre_event"])
    post = decode(annotation["attrs"]["samples_post_event"])
    # index of the channel of interest among all channel labels in the file
    cix = [
        cnt.get_channel_info(c)[0] for c in range(cnt.get_channel_count())
    ].index(decode(annotation["attrs"]["channel_of_interest"])[0])
    traces = []
    for attrs in annotation["traces"]:
        onset = decode(attrs["event_sample"])
        trace = cnt.get_samples(fro=onset - pre, to=onset + post)
        trace = np.asanyarray(trace)[:, cix]
        traces.append(trace)
    return traces

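# Hypothetical usage sketch (the file name is an assumption, not from the
# source): cut traces from a .cnt recording referenced by the annotation.
def _example_cut_cnt_traces(annotation: Annotations) -> List[TraceData]:
    traces = cut_traces("recording.cnt", annotation)
    print("Cut", len(traces), "traces of", len(traces[0]), "samples each")
    return traces
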
def plot_map(
    cachefiles: List[CacheFile], foo=lambda x: x, ignore_rejected=True, **kwargs
):
    """plot the whole map for a complete cachefile

    args
    ----
    cachefiles: List[CacheFile]
        a list of the cachefiles to be plotted
    foo: Callable
        will be applied to each value, and defaults to passing the original.
        But could be, e.g. lambda x: log10(x + 1) to plot logarithmized values
    ignore_rejected: bool
        defaults to True, and skips any traces which have been flagged for
        rejection. Set to False to ignore the rejection flag and plot their
        values anyways.
    **kwargs
        additional keyword arguments are passed to :py:func:`~.plot_glass`

    returns
    -------
    display:
        the figure handle for the mapping plot
    """
    coords = []
    values = []
    uninspected = 0.0
    total = 0.0
    for cf in cachefiles:
        total += len(cf)
        for trace, tattr in cf:
            if not ignore_rejected or not decode(tattr["reject"]):
                npk = decode(tattr["neg_peak_magnitude_uv"])
                ppk = decode(tattr["pos_peak_magnitude_uv"])
                if ppk is not None and npk is not None:
                    # peak-to-peak amplitude of the trace
                    val = ppk - npk
                else:
                    # peaks were never annotated, count the trace as uninspected
                    val = 0
                    uninspected += 1.0
                xyz = decode(tattr["xyz_coords"])
                coords.append(xyz)
                values.append(val)

    rejected = total - len(values)
    print(f"This plot is based on {len(values)}/{total:3.0f} traces.")
    print(f"{rejected:3.0f} traces were rejected.")
    print(f"{uninspected:3.0f} traces were not inspected.")
    values = list(map(foo, values))
    return plot_glass(coords, values, **kwargs)

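# Hypothetical usage sketch (the function name is an assumption, not from the
# source): plot a map of logarithmized peak-to-peak amplitudes, as suggested
# in the docstring of plot_map.
def _example_plot_log_map(cachefiles: List[CacheFile]):
    return plot_map(cachefiles, foo=lambda x: np.log10(x + 1))
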
def process_data(
    data, attrs, key: str = "_log", delim: str = " on ", verbose: bool = True
) -> TraceData:
    """return TraceData processed by the steps in the field indexed by key

    args
    ----
    data: TraceData
        the tracedata
    attrs: TraceAttributes
        the traceattributes
    key: str
        which field is used for logging the processing steps

    returns
    -------
    data: TraceData
        the data stored for this trace, but processed with the steps performed
    """
    if key in attrs.keys():
        log = decode(attrs[key])
        for event in log:
            # each log entry has the form "<step><delim><when>"
            step, when = event.split(delim)
            if verbose:
                print("STEPS: Replaying", step, "from", when)
            data = PreProcessor[step](data, attrs)
    else:
        if verbose:
            print("No processing steps cached")
    return data

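# Hypothetical usage sketch (the function name is an assumption, not from the
# source): replay the cached processing steps on a trace before plotting it.
def _example_replay_and_plot(ax, data: TraceData, attrs) -> None:
    processed = process_data(data, attrs, verbose=False)
    plot_trace(ax, processed, attrs)
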
def plot_trace(ax, data, attrs):
    pre = decode(attrs["samples_pre_event"])
    post = decode(attrs["samples_post_event"])
    fs = decode(attrs["samplingrate"])
    t0 = -float(pre) / float(fs)
    t1 = float(post) / float(fs)

    # plot data
    ax.plot([pre, pre], [-200, 200], ":r")
    ax.plot(data)
    ax.set_ylim(-200, 200)
    ax.grid(True, which="both")
    ax.set_xticks((0, pre, pre + post))
    ax.set_xticklabels((t0, 0, t1))
    ax.set_xticks(range(0, pre + post, (pre + post) // 10), minor=True)
    ax.tick_params(direction="in")

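# Hypothetical usage sketch (the function name is an assumption, not from the
# source; assumes matplotlib is available): draw a single trace into a fresh
# axes and show the figure.
def _example_show_trace(data: TraceData, attrs) -> None:
    import matplotlib.pyplot as plt

    fig, ax = plt.subplots()
    plot_trace(ax, data, attrs)
    plt.show()
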
def cut_traces_multifile(files, annotation: Annotations) -> List[TraceData]:
    """cut the tracedata from multiple xdffiles given Annotations

    args
    ----
    files:
        the already loaded xdffiles for cutting the data. must correspond in
        name to the ones specified in the annotation
    annotation: Annotations
        the annotations specifying e.g. onsets as well as pre and post durations

    returns
    -------
    traces: List[TraceData]
    """
    channel = decode(annotation["attrs"]["channel_of_interest"])
    print("Selecting traces for channel", channel)
    data_series = None
    data_stamps = None
    # concatenate the time series and time stamps of all files along the
    # sample axis
    for streams in files:
        datastream = pick_stream_with_channel(channel, streams)
        cix = datastream.channel_labels.index(channel)
        if data_series is None:
            data_series = datastream.time_series
            data_stamps = datastream.time_stamps
        else:
            data_series = np.concatenate(
                (data_series, datastream.time_series), axis=0
            )
            data_stamps = np.concatenate(
                (data_stamps, datastream.time_stamps), axis=0
            )

    pre = decode(annotation["attrs"]["samples_pre_event"])
    post = decode(annotation["attrs"]["samples_post_event"])
    traces = []
    for attrs in annotation["traces"]:
        onset = decode(attrs["event_sample"])
        trace = data_series[onset - pre : onset + post, cix]
        traces.append(trace)
    return traces

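# Hypothetical usage sketch (file names are assumptions, not from the source):
# cut traces from a session that was recorded across two xdf files.
def _example_cut_across_files(annotation: Annotations) -> List[TraceData]:
    files = [XDFFile("run1.xdf"), XDFFile("run2.xdf")]
    return cut_traces_multifile(files, annotation)
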
def linenoise(data, attrs):
    signal = data.copy()
    fs = decode(attrs["samplingrate"])
    timestep = 1 / fs
    original_len = len(signal)
    filter_order = 100
    # pad zeros at the front until the signal is at least one second long,
    # so the FFT has a bin at exactly 50 Hz
    while len(signal) < fs:
        signal = np.pad(signal, (1, 0), "constant", constant_values=(0))
    fourier = np.fft.fft(signal)
    freq = np.fft.fftfreq(len(signal), d=timestep)
    # zero out the 50 Hz component and transform back
    fidx = int(np.where(freq == 50)[0][0])
    fourier[fidx] = 0
    signal = np.real(np.fft.ifft(fourier))
    # drop the padding again and write the cleaned signal back
    signal = signal[-original_len:]
    data[:] = signal
    return data

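# Hypothetical usage sketch (the function name is an assumption, not from the
# source; assumes the trace attributes stored in the cachefile contain the
# samplingrate): strip 50 Hz line noise from every trace in a cachefile.
def _example_remove_linenoise(cf: CacheFile) -> List[TraceData]:
    return [linenoise(trace, tattr) for trace, tattr in cf]
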
def baseline(data, attrs):
    pre = decode(attrs["samples_pre_event"])
    shift = decode(attrs["onset_shift"]) or 0
    # subtract the mean of the pre-stimulus period, accounting for any onset shift
    bl = data[: pre + shift].mean(0)
    data = data - bl
    return data

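# Hypothetical usage sketch (the function name and ordering are assumptions,
# not from the source): chain the two preprocessing steps, removing line noise
# before subtracting the pre-stimulus baseline.
def _example_preprocess(data: TraceData, attrs) -> TraceData:
    data = linenoise(data, attrs)
    data = baseline(data, attrs)
    return data
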