def test_pick_events():
    """Test pick events in an events ndarray."""
    events = np.array([[1, 0, 1],
                       [2, 1, 0],
                       [3, 0, 4],
                       [4, 4, 2],
                       [5, 2, 0]])
    assert_array_equal(pick_events(events, include=[1, 4], exclude=4),
                       [[1, 0, 1],
                        [3, 0, 4]])
    assert_array_equal(pick_events(events, exclude=[0, 2]),
                       [[1, 0, 1],
                        [3, 0, 4]])
    assert_array_equal(pick_events(events, include=[1, 2], step=True),
                       [[1, 0, 1],
                        [2, 1, 0],
                        [4, 4, 2],
                        [5, 2, 0]])
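A note on the semantics this test pins down: when ``include`` is given, ``exclude`` is ignored, and with ``step=True`` the second column (the pre-transition value) is matched in addition to the third. A minimal sketch with toy events in the standard [sample, previous value, new value] layout:

import numpy as np
from mne import pick_events

events = np.array([[1, 0, 1],
                   [2, 1, 0],
                   [5, 2, 0]])
# By default only the third column (the new value) is matched:
print(pick_events(events, include=1))            # keeps [1, 0, 1]
# With step=True the second column is matched too, so the 1 -> 0
# transition at sample 2 is also kept:
print(pick_events(events, include=1, step=True))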
def save_epochs_as(eeg, preproc_type, events, participant, session_name):
    os.makedirs(data_dir + '/epochs/' + preproc_type, exist_ok=True)

    familiar_events = mne.pick_events(events, include=[5, 6, 7])
    familiar_events = remove_duplicate_events(familiar_events)
    familiar_epochs = Epochs(eeg, familiar_events, tmin=-0.3, tmax=1,
                             proj=True, detrend=0, preload=False,
                             verbose=0).drop_bad()
    familiar_epochs = convert_epochs_float32(familiar_epochs)
    familiar_epochs.save(data_dir + '/epochs/' + preproc_type + '/familiar_' +
                         participant + '_' + session_name + '-epo.fif',
                         verbose=0)

    unfamiliar_events = mne.pick_events(events, include=[13, 14, 15])
    unfamiliar_events = remove_duplicate_events(unfamiliar_events)
    unfamiliar_epochs = Epochs(eeg, unfamiliar_events, tmin=-0.3, tmax=1,
                               proj=True, detrend=0, preload=False,
                               verbose=0).drop_bad()
    unfamiliar_epochs = convert_epochs_float32(unfamiliar_epochs)
    unfamiliar_epochs.save(data_dir + '/epochs/' + preproc_type +
                           '/unfamiliar_' + participant + '_' + session_name +
                           '-epo.fif', verbose=0)

    noise_events = mne.pick_events(events, include=[17, 18, 19])
    noise_events = remove_duplicate_events(noise_events)
    noise_epochs = Epochs(eeg, noise_events, tmin=-0.3, tmax=1, proj=True,
                          detrend=0, preload=False, verbose=0).drop_bad()
    noise_epochs = convert_epochs_float32(noise_epochs)
    noise_epochs.save(data_dir + '/epochs/' + preproc_type + '/noise_' +
                      participant + '_' + session_name + '-epo.fif',
                      verbose=0)

    return familiar_epochs, unfamiliar_epochs, noise_epochs
def split_events_by_trialtype(events):
    # lists rather than ranges, so they work as include arguments in Python 3
    devsA, devsB = list(range(111, 117)), list(range(211, 217))
    VS_eve = mne.pick_events(events, include=list(range(100, 220)))
    VS_eve = mne.merge_events(VS_eve, [100], 10, replace_events=True)
    VS_eve = mne.merge_events(VS_eve, [200], 20, replace_events=True)
    VS_eve = mne.merge_events(VS_eve, devsA, 11, replace_events=True)
    VS_eve = mne.merge_events(VS_eve, devsB, 21, replace_events=True)
    FB_eve = mne.pick_events(events, include=list(range(10, 22)))

    eve_dict = dict(VS=VS_eve, FB=FB_eve)
    id_dict = dict(stdA=10, stdB=20, devA=11, devB=21)  # now same for VS and FB

    return eve_dict, id_dict
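For readers unfamiliar with ``mne.merge_events`` as used above, here is a toy sketch of what the replacement does (event codes invented for illustration):

import numpy as np
import mne

events = np.array([[10, 0, 111], [20, 0, 112], [30, 0, 100]])
merged = mne.merge_events(events, [111, 112], 11, replace_events=True)
print(merged)  # the deviant codes 111 and 112 are both rewritten to 11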
def process_raw(self, raw, dataset):
    # find the events, first check stim_channels then annotations
    stim_channels = mne.utils._get_stim_channel(None, raw.info,
                                                raise_error=False)
    if len(stim_channels) > 0:
        events = mne.find_events(raw, shortest_event=0, verbose=False)
    else:
        events, _ = mne.events_from_annotations(raw, verbose=False)

    channels = () if self.channels is None else self.channels

    # picks channels
    picks = mne.pick_types(raw.info, eeg=True, stim=False, include=channels)

    # get event id
    event_id = self.used_events(dataset)

    # pick events, based on event_id
    try:
        events = mne.pick_events(events, include=list(event_id.values()))
    except RuntimeError:
        # skip raw if no event found
        return

    # get interval
    tmin = self.tmin + dataset.interval[0]
    if self.tmax is None:
        tmax = dataset.interval[1]
    else:
        tmax = self.tmax + dataset.interval[0]

    X = []
    for bandpass in self.filters:
        fmin, fmax = bandpass
        # filter data
        raw_f = raw.copy().filter(fmin, fmax, method='iir', picks=picks,
                                  verbose=False)
        # epoch data
        epochs = mne.Epochs(raw_f, events, event_id=event_id, tmin=tmin,
                            tmax=tmax, proj=False, baseline=None,
                            preload=True, verbose=False, picks=picks,
                            on_missing='ignore')
        if self.resample is not None:
            epochs = epochs.resample(self.resample)
        # rescale to work with uV
        X.append(dataset.unit_factor * epochs.get_data())

    inv_events = {k: v for v, k in event_id.items()}
    labels = np.array([inv_events[e] for e in epochs.events[:, -1]])

    # if only one band, return a 3D array, otherwise return a 4D
    if len(self.filters) == 1:
        X = X[0]
    else:
        X = np.array(X).transpose((1, 2, 3, 0))

    metadata = pd.DataFrame(index=range(len(labels)))
    return X, labels, metadata
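The try/except pattern above (and in several later examples) works because ``mne.pick_events`` raises a ``RuntimeError`` when nothing matches the ``include`` list. A toy demonstration:

import numpy as np
import mne

events = np.array([[10, 0, 1]])
try:
    mne.pick_events(events, include=99)
except RuntimeError:
    print("no matching events, skipping this run")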
def Add_condition_tag(events):
    import numpy as np
    import mne

    # Create the tag vector
    Mod_tag = 0
    Mod = 0
    for i in range(len(events)):
        if events[i, 2] == 21:
            Mod = 0
        if events[i, 2] == 22:
            Mod = 1
        if events[i, 2] == 23:
            Mod = 2
        Mod_tag = np.append(Mod_tag, Mod)
    Mod_tag = np.delete(Mod_tag, 0)

    # Stack with the event matrix
    events_taged = np.hstack((events, Mod_tag[:, None]))
    events_taged = mne.pick_events(events_taged, include=[31, 32, 33, 34])
    return events_taged
def split_events_by_trialtype(events, condition='VS'):
    if 'VS' in condition:
        # lists rather than ranges, so they work as include args in Python 3
        devsA, devsB = list(range(111, 117)), list(range(211, 217))
        VS_eve = mne.pick_events(events, include=list(range(100, 220)))
        # Since Sep 2015, no longer do any of this replacement crap,
        # and keep the devs separate for location. Use the evoked_categories
        # dictionary to apply combination logic to the triggers.
        # VS_eve = mne.merge_events(VS_eve, [100], 10, replace_events=True)
        # VS_eve = mne.merge_events(VS_eve, [200], 20, replace_events=True)
        #
        # # Don't replace the deviants, make a copy instead!
        # VS_eve = mne.merge_events(VS_eve, devsA, 11, replace_events=True)
        # VS_eve = mne.merge_events(VS_eve, devsB, 21, replace_events=True)
        ###########
        # NB! The problem with this is that each of the events
        # gets turned into an epoch later, so we get duplication.
        # Consider NOT merging the events to get 11 and 21?
        # Will then have to write some logic later to combine the 11x and 21x
        # This hack is needed to get both 11/21's and 11N/21N's together!
        # tmp = mne.pick_events(events, include=devsA + devsB)
        # # tmp[:, 0] += 1  # add a ms
        # VS_eve = np.concatenate((VS_eve, tmp), axis=0)
        # VS_eve = VS_eve[np.argsort(VS_eve[:, 0])]
        ###########
        FB_eve = mne.pick_events(events, include=list(range(10, 22)))

        eve_dict = dict(VS=VS_eve, FB=FB_eve)

    elif 'FFA' in condition:
        FFA_eve = mne.pick_events(events, include=[100, 150, 200])
        eve_dict = dict(FFA=FFA_eve)

    id_dict = dict(VS=dict(stdA=100, stdB=200,
                           A1=111, A2=112, A3=113, A4=114, A5=115, A6=116,
                           B1=211, B2=212, B3=213, B4=214, B5=215, B6=216),
                   FB=dict(stdA=10, stdB=20, devA=11, devB=21),
                   FFA=dict(A=100, B=200, blur=150))

    return eve_dict, id_dict
def test_event_colors():
    """Test color assignment."""
    events = pick_events(_get_events(), include=[1, 2])
    unique_events = set(events[:, 2])
    # make sure defaults work
    colors = _handle_event_colors(None, unique_events, dict())
    default_colors = _get_color_list()
    assert colors[1] == default_colors[0]
    # make sure custom color overrides default
    colors = _handle_event_colors(color_dict=dict(foo='k', bar='#facade'),
                                  unique_events=unique_events,
                                  event_id=dict(foo=1, bar=2))
    assert colors[1] == 'k'
    assert colors[2] == '#facade'
def eventreader(raw, subject, event_dict, df, outputdir="/tmp/"):
    """
    Find all events and repair them, if necessary.

    :param raw:
    :param subject: str, subject identifier in the form of '001'
    :param event_dict: dict, trigger name associations
    :param df: dataframe, contains logging information about the experiment
    :param outputdir: str, path to where diagnostic figures are saved
    :return:
    """
    events = mne.find_events(
        raw,
        min_duration=0.002,  # ignores spurious events
        uint_cast=True,  # workaround for an Elekta acquisition bug that
                         # causes negative values
        stim_channel=["STI101", "STI102", "STI016"],  # get all triggers
        consecutive=True,  # triggers are overlaid by photodiode signals, sadly
    )
    # Remove events that are known to be spurious. It is not a problem if they
    # are only present in some subjects; we can specify a group-level list
    # here. 5 was the photodiode onset, but it is not relevant for us.
    exclusion_list = [254, 32768, 5]
    events = mne.pick_events(events, exclude=exclusion_list)
    # remove any signals obfuscated by a photodiode
    events = repair_triggers(events=events, log_df=df)
    # plot events. This works without raw data
    fig = mne.viz.plot_events(
        events,
        sfreq=raw.info["sfreq"],
        first_samp=raw.first_samp,
        event_id=event_dict,
        on_missing="warn",
    )
    fig.suptitle("Full event protocol for {} ({})".format(
        raw.info["subject_info"]["first_name"],
        raw.info["subject_info"]["last_name"],
    ))
    fpath = _construct_path([
        Path(outputdir),
        f"sub-{subject}",
        "meg",
        f"sub-{subject}_task-memento_eventplot.png",
    ])
    fig.savefig(str(fpath))
    return events
def test_read_epochs():
    """Test reading epochs from raw files."""
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0))
    epochs.average()
    data = epochs.get_data()
    epochs_no_id = Epochs(raw, pick_events(events, include=event_id), None,
                          tmin, tmax, picks=picks, baseline=(None, 0))
    assert_array_equal(data, epochs_no_id.get_data())

    eog_picks = fiff.pick_types(raw.info, meg=False, eeg=False, stim=False,
                                eog=True)
    epochs.drop_picks(eog_picks)
    data_no_eog = epochs.get_data()
    assert_true(data.shape[1] == (data_no_eog.shape[1] + len(eog_picks)))
dat.info['lowpass'] = round(dat.info['lowpass'], 8)
dat.info['highpass'] = round(dat.info['highpass'], 8)
dat.info['line_freq'] = 60

# Extract trigger event data from EEG annotations
try:
    annot = mne.read_annotations(filepath)
    if len(annot) > 10:
        dat.set_annotations(annot)
        events, e_id = mne.events_from_annotations(dat, event_id=event_map)
        orig_time = dat.annotations.orig_time
    else:
        events = mne.find_events(dat, shortest_event=1, mask=65280,
                                 mask_type="not_and")
        orig_time = dat.info['meas_date']
    events = mne.pick_events(events, include=list(event_map.values()))
    annot_new = mne.annotations_from_events(
        events=events, sfreq=dat.info['sfreq'], orig_time=orig_time,
        event_desc=event_name_map, verbose=False
    )
    dat.set_annotations(annot_new)
except (ValueError, RuntimeError):
    print(" * Unable to find any valid triggers, skipping...\n")
    continue

# Actually write out BIDS data
write_raw_bids(dat, bids_path, verbose=False)

# Update sidecar files with correct metadata
json_path = BIDSPath(subject=study_id, task=taskname, suffix='eeg',
                     extension='.json', root=bids_root)
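A minimal, self-contained sketch of the events-to-annotations round trip used above; the event codes and descriptions here are invented for illustration:

import numpy as np
import mne

info = mne.create_info(['EEG 001'], sfreq=100., ch_types='eeg')
raw = mne.io.RawArray(np.zeros((1, 1000)), info)
events = np.array([[100, 0, 1], [400, 0, 2]])
annot = mne.annotations_from_events(
    events=events, sfreq=raw.info['sfreq'],
    event_desc={1: 'stimulus', 2: 'response'})
raw.set_annotations(annot)
# Recover events; MNE assigns new integer ids keyed by description:
events_back, event_id = mne.events_from_annotations(raw)
print(event_id)  # e.g. {'response': 1, 'stimulus': 2}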
raw = ica.apply(raw, exclude=comp)
filter_picks = mne.pick_types(raw.info, meg=True, ref_meg=True, stim=False,
                              eog=False, misc=False)
raw = raw.filter(None, 45, method="fir", phase="zero-double", n_jobs=-1,
                 picks=filter_picks)
epo_file = op.join(subj_path, "lp45-{}-epo.fif".format(file_n.split("-")[1]))
onsets = mne.pick_events(events, exclude=[0])
epochs = mne.Epochs(raw, onsets, tmin=-0.1, tmax=0.4, baseline=None)
epochs.save(epo_file)
named_tuple = time.localtime()  # get struct_time
time_string = time.strftime("%m/%d/%Y, %H:%M:%S", named_tuple)
print("applying ICA, filtering and epoching done:", time_string)

# use: ipython --gui=qt5 head_coregistration.py and head_coreg_check.py
# manually to produce -trans.fif files for source localisation
# be dropped as the browser window is closed.
#
# It is possible to plot event markers on epoched data by passing the
# ``events`` keyword to the epochs plotter. The events are plotted as
# vertical lines and they follow the same coloring scheme as
# :func:`mne.viz.plot_events`. The events plotter gives you all the events
# with a rough idea of the timing. Since the colors are the same, the event
# plotter can also function as a legend for the epochs plotter events. It is
# also possible to pass your own colors via the ``event_colors`` keyword.
# Here we can plot the reaction times between seeing the smiley face and the
# button press (event 32).
#
# When events are passed, the epoch numbering at the bottom is switched off
# by default to avoid overlaps. You can turn it back on via the settings
# dialog by pressing the `o` key. You should check out `help` at the lower
# left corner of the window for more information about the interactive
# features.

events = mne.pick_events(events, include=[5, 32])
mne.viz.plot_events(events)
epochs['smiley'].plot(events=events)

###############################################################################
# To plot individual channels as an image, where you see all the epochs at
# one glance, you can use the function :func:`mne.Epochs.plot_image`. It
# shows the amplitude of the signal over all the epochs plus an average
# (evoked response) of the activation. We explicitly set the interactive
# colorbar on (it is also on by default for plotting functions with a
# colorbar, except the topo plots). In interactive mode you can scale and
# change the colormap with the mouse scroll and up/down arrow keys. You can
# also drag the colorbar with the left/right mouse button. Hitting the space
# bar resets the scale.

epochs.plot_image(278, cmap='interactive')

###############################################################################
def process_raw(self, raw, dataset, return_epochs=False):  # noqa: C901
    # find the events, first check stim_channels then annotations
    stim_channels = mne.utils._get_stim_channel(None, raw.info,
                                                raise_error=False)
    if len(stim_channels) > 0:
        events = mne.find_events(raw, shortest_event=0, verbose=False)
    else:
        events, _ = mne.events_from_annotations(raw, verbose=False)

    channels = () if self.channels is None else self.channels

    # picks channels
    picks = mne.pick_types(raw.info, eeg=True, stim=False, include=channels)

    # get event id
    event_id = self.used_events(dataset)

    # pick events, based on event_id
    try:
        if isinstance(event_id["Target"], list) and \
                isinstance(event_id["NonTarget"], list):
            event_id_new = dict(Target=1, NonTarget=0)
            events = mne.merge_events(events, event_id["Target"], 1)
            events = mne.merge_events(events, event_id["NonTarget"], 0)
            event_id = event_id_new
        events = mne.pick_events(events, include=list(event_id.values()))
    except RuntimeError:
        # skip raw if no event found
        return

    # get interval
    tmin = self.tmin + dataset.interval[0]
    if self.tmax is None:
        tmax = dataset.interval[1]
    else:
        tmax = self.tmax + dataset.interval[0]

    X = []
    for bandpass in self.filters:
        fmin, fmax = bandpass
        # filter data
        raw_f = raw.copy().filter(fmin, fmax, method="iir", picks=picks,
                                  verbose=False)
        # epoch data
        baseline = self.baseline
        if baseline is not None:
            baseline = (
                self.baseline[0] + dataset.interval[0],
                self.baseline[1] + dataset.interval[0],
            )
            bmin = baseline[0] if baseline[0] < tmin else tmin
            bmax = baseline[1] if baseline[1] > tmax else tmax
        else:
            bmin = tmin
            bmax = tmax
        epochs = mne.Epochs(
            raw_f,
            events,
            event_id=event_id,
            tmin=bmin,
            tmax=bmax,
            proj=False,
            baseline=baseline,
            preload=True,
            verbose=False,
            picks=picks,
            on_missing="ignore",
        )
        if bmin < tmin or bmax > tmax:
            epochs.crop(tmin=tmin, tmax=tmax)
        if self.resample is not None:
            epochs = epochs.resample(self.resample)
        # rescale to work with uV
        if return_epochs:
            X.append(epochs)
        else:
            X.append(dataset.unit_factor * epochs.get_data())

    inv_events = {k: v for v, k in event_id.items()}
    labels = np.array([inv_events[e] for e in epochs.events[:, -1]])

    if return_epochs:
        X = mne.concatenate_epochs(X)
    elif len(self.filters) == 1:
        # if only one band, return a 3D array
        X = X[0]
    else:
        # otherwise return a 4D
        X = np.array(X).transpose((1, 2, 3, 0))

    metadata = pd.DataFrame(index=range(len(labels)))
    return X, labels, metadata
def test_read_write_epochs():
    """Test epochs from raw files with IO as fif file."""
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0))
    evoked = epochs.average()
    data = epochs.get_data()
    epochs_no_id = Epochs(raw, pick_events(events, include=event_id), None,
                          tmin, tmax, picks=picks, baseline=(None, 0))
    assert_array_equal(data, epochs_no_id.get_data())

    eog_picks = fiff.pick_types(raw.info, meg=False, eeg=False, stim=False,
                                eog=True, exclude="bads")
    epochs.drop_picks(eog_picks)
    assert_true(len(epochs.info["chs"]) == len(epochs.ch_names)
                == epochs.get_data().shape[1])
    data_no_eog = epochs.get_data()
    assert_true(data.shape[1] == (data_no_eog.shape[1] + len(eog_picks)))

    # test decim kwarg
    with warnings.catch_warnings(record=True) as w:
        epochs_dec = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                            baseline=(None, 0), decim=4)
        assert_equal(len(w), 1)

    data_dec = epochs_dec.get_data()
    assert_array_equal(data[:, :, epochs_dec._decim_idx], data_dec)

    evoked_dec = epochs_dec.average()
    assert_array_equal(evoked.data[:, epochs_dec._decim_idx],
                       evoked_dec.data)

    n = evoked.data.shape[1]
    n_dec = evoked_dec.data.shape[1]
    n_dec_min = n // 4
    assert_true(n_dec_min <= n_dec <= n_dec_min + 1)
    assert_true(evoked_dec.info["sfreq"] == evoked.info["sfreq"] / 4)

    # test IO
    epochs.save(op.join(tempdir, "test-epo.fif"))
    epochs_read = read_epochs(op.join(tempdir, "test-epo.fif"))

    assert_array_almost_equal(epochs_read.get_data(), epochs.get_data())
    assert_array_equal(epochs_read.times, epochs.times)
    assert_array_almost_equal(epochs_read.average().data, evoked.data)
    assert_equal(epochs_read.proj, epochs.proj)
    bmin, bmax = epochs.baseline
    if bmin is None:
        bmin = epochs.times[0]
    if bmax is None:
        bmax = epochs.times[-1]
    baseline = (bmin, bmax)
    assert_array_almost_equal(epochs_read.baseline, baseline)
    assert_array_almost_equal(epochs_read.tmin, epochs.tmin, 2)
    assert_array_almost_equal(epochs_read.tmax, epochs.tmax, 2)
    assert_equal(epochs_read.event_id, epochs.event_id)

    epochs.event_id.pop("1")
    epochs.event_id.update({"a": 1})
    epochs.save(op.join(tempdir, "foo-epo.fif"))
    epochs_read2 = read_epochs(op.join(tempdir, "foo-epo.fif"))
    assert_equal(epochs_read2.event_id, epochs.event_id)

    # add reject here so some of the epochs get dropped
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), reject=reject)
    epochs.save(op.join(tempdir, "test-epo.fif"))
    # ensure bad events are not saved
    epochs_read3 = read_epochs(op.join(tempdir, "test-epo.fif"))
    assert_array_equal(epochs_read3.events, epochs.events)
    data = epochs.get_data()
    assert_true(epochs_read3.events.shape[0] == data.shape[0])

    # test copying loaded one (raw property)
    epochs_read4 = epochs_read3.copy()
    assert_array_almost_equal(epochs_read4.get_data(), data)
def get_data(self, dataset, subjects=None, verbose=False):
    """Get data from dataset with selected subjects.

    Parameters
    ----------
    dataset : Dataset instance.
        Dataset.
    subjects : None | list of int, optional
        Selected subject ids; if None, use all subjects in the dataset.
    verbose : bool, optional
        Print processing information.

    Returns
    -------
    Xs : dict
        A dict of selected events data X.
    ys : dict
        A dict of selected events label y.
    metas : dict
        A dict of selected events metainfo meta.
    """
    if not self.is_valid(dataset):
        raise TypeError(
            "Dataset {:s} is not valid for the current paradigm. "
            "Check your events and channels settings".format(dataset.code))

    data = dataset.get_data(subjects)

    # events, interval checking
    used_events, used_intervals = self._map_events_intervals(dataset)

    Xs = {}
    ys = {}
    metas = {}

    for subject, sessions in data.items():
        for session, runs in sessions.items():
            for run, raw in runs.items():
                # do raw hook
                caches = {}
                if self._raw_hook:
                    raw, caches = self._raw_hook(raw, caches)

                # pick selected channels by order
                channels = dataset.channels if self.select_channels is None \
                    else self.select_channels
                # picks = mne.pick_channels(raw.ch_names, channels,
                #                           ordered=True)
                picks = pick_channels(raw.ch_names, channels, ordered=True)

                # find available events, first check stim_channels then
                # annotations
                stim_channels = mne.utils._get_stim_channel(
                    None, raw.info, raise_error=False)
                if len(stim_channels) > 0:
                    events = mne.find_events(raw, shortest_event=0,
                                             initial_event=True)
                else:
                    # convert event_id to its number type instead of the
                    # default auto-renaming in 0.19.2
                    events, _ = mne.events_from_annotations(
                        raw, event_id=(lambda x: int(x)))

                for event_name in used_events.keys():
                    # mne.pick_events returns any matching events in include;
                    # it only raises a RuntimeError when nothing is found,
                    # then we just skip this event
                    try:
                        selected_events = mne.pick_events(
                            events, include=used_events[event_name])
                    except RuntimeError:
                        continue

                    # transform Raw to Epochs
                    epochs = mne.Epochs(
                        raw,
                        selected_events,
                        event_id={event_name: used_events[event_name]},
                        event_repeated='drop',
                        tmin=used_intervals[event_name][0],
                        tmax=used_intervals[event_name][1]
                        - 1. / raw.info['sfreq'],
                        picks=picks,
                        proj=False,
                        baseline=None,
                        preload=True)

                    # do epochs hook
                    if self._epochs_hook:
                        epochs, caches = self._epochs_hook(epochs, caches)

                    # FIXME: is this resample reasonable?
                    if self.srate:
                        # as MNE suggests, decimate after extracting epochs;
                        # low-pass the raw object in raw_hook to prevent
                        # aliasing problems
                        epochs = epochs.resample(self.srate)
                        # epochs = epochs.decimate(dataset.srate//self.srate)

                    # retrieve X, y and meta
                    X = epochs.get_data()
                    y = epochs.events[:, -1]
                    meta = pd.DataFrame({
                        "subject": [subject] * len(epochs),
                        "session": [session] * len(epochs),
                        "run": [run] * len(epochs),
                    })

                    # do data hook
                    if self._data_hook:
                        X, y, meta, caches = self._data_hook(X, y, meta,
                                                             caches)

                    # collecting data
                    pre_X = Xs.get(event_name)
                    if pre_X is not None:
                        Xs[event_name] = np.concatenate((pre_X, X), axis=0)
                    else:
                        Xs[event_name] = X

                    pre_y = ys.get(event_name)
                    if pre_y is not None:
                        ys[event_name] = np.concatenate((pre_y, y), axis=0)
                    else:
                        ys[event_name] = y

                    pre_meta = metas.get(event_name)
                    if pre_meta is not None:
                        metas[event_name] = pd.concat(
                            (pre_meta, meta), axis=0, ignore_index=True)
                    else:
                        metas[event_name] = meta

    return Xs, ys, metas
def process_raw(self, raw, dataset):
    """
    Process one raw data file.

    This function applies the preprocessing and eventual epoching on the
    individual run, and returns the data, labels and a dataframe with
    metadata. The metadata is a dataframe with as many rows as the length
    of the data and labels.

    Parameters
    ----------
    raw : mne.Raw instance
        the raw EEG data.
    dataset : dataset instance
        The dataset corresponding to the raw file. Mainly used to access
        dataset-specific information.

    Returns
    -------
    X : np.ndarray
        the data that will be used as features for the model
    labels : np.ndarray
        the labels for training / evaluating the model
    metadata : pd.DataFrame
        A dataframe containing the metadata
    """
    # find the events
    events = mne.find_events(raw, shortest_event=0, verbose=False)

    channels = () if self.channels is None else self.channels

    # picks channels
    picks = mne.pick_types(raw.info, eeg=True, stim=False, include=channels)

    # get events id
    event_id = self.used_events(dataset)

    # pick events, based on event_id
    try:
        events = mne.pick_events(events, include=list(event_id.values()))
    except RuntimeError:
        # skip raw if no event found
        return

    # get interval
    tmin = self.tmin + dataset.interval[0]
    if self.tmax is None:
        tmax = dataset.interval[1]
    else:
        tmax = self.tmax + dataset.interval[0]

    X = []
    for bandpass in self.filters:
        fmin, fmax = bandpass
        # filter data
        raw_f = raw.copy().filter(fmin, fmax, method='iir', picks=picks,
                                  verbose=False)
        # epoch data
        epochs = mne.Epochs(raw_f, events, event_id=event_id, tmin=tmin,
                            tmax=tmax, proj=False, baseline=None,
                            preload=True, verbose=False, picks=picks,
                            on_missing='ignore')
        if self.resample is not None:
            epochs = epochs.resample(self.resample)
        # MNE is in V, rescale to have uV
        X.append(1e6 * epochs.get_data())

    inv_events = {k: v for v, k in event_id.items()}
    labels = np.array([inv_events[e] for e in epochs.events[:, -1]])

    # if only one band, return a 3D array, otherwise return a 4D
    if len(self.filters) == 1:
        X = X[0]
    else:
        X = np.array(X).transpose((1, 2, 3, 0))

    metadata = pd.DataFrame(index=range(len(labels)))
    return X, labels, metadata
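The 3D-versus-4D convention at the end of this ``process_raw`` is easy to misread, so here is a toy sketch of the shape handling with dummy arrays:

import numpy as np

n_epochs, n_channels, n_times = 5, 8, 100
bands = [np.zeros((n_epochs, n_channels, n_times)) for _ in range(3)]
X = np.array(bands).transpose((1, 2, 3, 0))
print(X.shape)  # (5, 8, 100, 3): epochs x channels x times x filter bands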
def Event_correction(N_S, N_E, events):
    import mne
    import numpy as np

    # Exclude this tag from the events
    events = mne.pick_events(events, exclude=65536)

    # Count the events =====================================================
    # Initialize vars
    Events_code = np.zeros([len(events[:, 2]), 2], dtype=int)
    Events_code[:, 0] = events[:, 2]
    Events_uniques = np.unique(Events_code[:, 0])
    Event_count = np.zeros([len(Events_uniques), 2], dtype=int)
    Event_count[:, 0] = Events_uniques

    # Count
    a = 0
    for i in Events_uniques:
        Event_count[a, 1] = len(
            np.extract(Events_code == Events_uniques[a], Events_code))
        a = a + 1

    # CHECK EVENTS =========================================================
    # WORKS ONLY IF THERE ARE NOT 2 CONSECUTIVE TAGS MISSING
    # TODO: Check iteratively until no tags are missing.
    # =====================================================================
    Warnings = 0
    # Warning missing code
    Warnings_code = [0]
    # Warning positions
    Warnings_pos = [0]
    for i in range(len(Events_code)):
        Events_code[i, 1] = i
        # Check tag codes 31 32 33 34
        # Find the start mark
        if Events_code[i, 0] == 42:
            # If the next mark is a tag, it is OK
            if (Events_code[i + 1, 0] == 31 or Events_code[i + 1, 0] == 32
                    or Events_code[i + 1, 0] == 33
                    or Events_code[i + 1, 0] == 34):
                # Do nothing
                pass
            # If the next mark is the concentration mark, the tag is missing
            elif Events_code[i + 1, 0] == 44:
                min_tag = min(Event_count[Event_count[:, 0] == 31, 1],
                              Event_count[Event_count[:, 0] == 32, 1],
                              Event_count[Event_count[:, 0] == 33, 1],
                              Event_count[Event_count[:, 0] == 34, 1])
                miss_tag = Event_count[Event_count[:, 1] == min_tag, 0]
                print('Warnings, miss ' + str(miss_tag) + ' at i = ' + str(i))
                Warnings = Warnings + 1
                Warnings_code = np.append(Warnings_code, miss_tag)
                Warnings_pos = np.append(Warnings_pos, i)
        # Check start code 42
        if Events_code[i, 0] == 46:
            if (Events_code[i + 1, 0] == 42 or Events_code[i + 1, 0] == 16
                    or Events_code[i + 1, 0] == 17):
                pass
            else:
                print('Warning, miss start at i = ' + str(i))
                Warnings = Warnings + 1
                Warnings_code = np.append(Warnings_code, 42)
                Warnings_pos = np.append(Warnings_pos, i)
        if (Events_code[i, 0] == 21 or Events_code[i, 0] == 22
                or Events_code[i, 0] == 23):
            if Events_code[i + 1, 0] == 42:
                pass
            else:
                print('Warning, miss start at i = ' + str(i))
                Warnings = Warnings + 1
                Warnings_code = np.append(Warnings_code, 42)
                Warnings_pos = np.append(Warnings_pos, i)
        if (Events_code[i, 0] == 61 or Events_code[i, 0] == 62
                or Events_code[i, 0] == 63 or Events_code[i, 0] == 64):
            if (Events_code[i + 1, 0] == 42 or Events_code[i + 1, 0] == 16):
                pass
            else:
                print('Warning, miss start at i = ' + str(i))
                Warnings = Warnings + 1
                Warnings_code = np.append(Warnings_code, 42)
                Warnings_pos = np.append(Warnings_pos, i)
        # Check useful-interval code 44
        if (Events_code[i, 0] == 31 or Events_code[i, 0] == 32
                or Events_code[i, 0] == 33 or Events_code[i, 0] == 34):
            if Events_code[i + 1, 0] == 44:
                pass
            elif Events_code[i + 1, 0] == 45:
                print('Warning, miss Useful interval at i = ' + str(i))
                Warnings = Warnings + 1
                Warnings_code = np.append(Warnings_code, 44)
                Warnings_pos = np.append(Warnings_pos, i)
        # Check concentration-interval code 45
        if Events_code[i, 0] == 44:
            if Events_code[i + 1, 0] == 45:
                pass
            elif Events_code[i + 1, 0] == 46:
                print('Warning, miss Concentration interval at i = ' + str(i))
                Warnings = Warnings + 1
                Warnings_code = np.append(Warnings_code, 45)
                Warnings_pos = np.append(Warnings_pos, i)
        # Check rest-interval code 46
        if Events_code[i, 0] == 45:
            if Events_code[i + 1, 0] == 46:
                pass
            elif Events_code[i + 1, 0] == 42:
                print('Warning, miss Rest interval at i = ' + str(i))
                Warnings = Warnings + 1
                Warnings_code = np.append(Warnings_code, 46)
                Warnings_pos = np.append(Warnings_pos, i)
    # =====================================================================

    # Correcting the events ===============================================
    if Warnings == 0:
        print('No Warnings, no corrections of events')
        Events_code_fix = Events_code
        Corrected_events = events
    else:
        print("Correcting Events")
        # Adding missing values
        Warnings_code = np.delete(Warnings_code, 0)
        Warnings_pos = np.delete(Warnings_pos, 0)
        corrections = np.zeros([Warnings, 3], dtype=int)
        corrections[:, 2] = Warnings_code
        for i in range(len(corrections)):
            # If a 42 is missing, it was searched for after a 46
            if corrections[i, 2] == 42:
                corrections[i, 0] = events[Warnings_pos[i], 0] + 1945
            # If a 44 is missing, it was searched for after a 45
            elif corrections[i, 2] == 44:
                corrections[i, 0] = events[Warnings_pos[i], 0] + 2594
            # If a 45 is missing, it was searched for after a 46
            elif corrections[i, 2] == 45:
                corrections[i, 0] = events[Warnings_pos[i], 0] + 1075
            # If a 46 is missing, it was searched for after a 45
            elif corrections[i, 2] == 46:
                corrections[i, 0] = events[Warnings_pos[i], 0] + 1075
            # If a tag is missing, it was searched for after a 42
            elif corrections[i, 2] == miss_tag:
                corrections[i, 0] = events[Warnings_pos[i], 0] + 563

        # Append the missing events
        Corrected_events = np.append(events, corrections, axis=0)
        # Sort the events by the time stamp
        Corrected_events = Corrected_events[Corrected_events[:, 0].argsort()]

        # CHECK CORRECTED EVENTS ==========================================
        Warnings = 0
        Warnings_code = [0]
        Warnings_pos = [0]
        Events_code_fix = np.zeros([len(Corrected_events[:, 2]), 2],
                                   dtype=int)
        Events_code_fix[:, 0] = Corrected_events[:, 2]
        for i in range(len(Events_code_fix)):
            Events_code_fix[i, 1] = i
            # Check tag codes 31 32 33 34
            # Find the start mark
            if Events_code_fix[i, 0] == 42:
                # If the next mark is a tag, it is OK
                if (Events_code_fix[i + 1, 0] == 31
                        or Events_code_fix[i + 1, 0] == 32
                        or Events_code_fix[i + 1, 0] == 33
                        or Events_code_fix[i + 1, 0] == 34):
                    pass
                # If the next mark is the concentration mark, the tag is
                # missing
                elif Events_code_fix[i + 1, 0] == 44:
                    # WORKS ONLY IF JUST ONE TAG IS MISSING !!!!
                    min_tag = min(Event_count[Event_count[:, 0] == 31, 1],
                                  Event_count[Event_count[:, 0] == 32, 1],
                                  Event_count[Event_count[:, 0] == 33, 1],
                                  Event_count[Event_count[:, 0] == 34, 1])
                    miss_tag = Event_count[Event_count[:, 1] == min_tag, 0]
                    print('Warnings, miss ' + str(miss_tag) + ' at i = '
                          + str(i))
                    Warnings = Warnings + 1
                    Warnings_code = np.append(Warnings_code, miss_tag)
                    Warnings_pos = np.append(Warnings_pos, i)
            # Check start code 42
            if Events_code_fix[i, 0] == 46:
                if (Events_code_fix[i + 1, 0] == 42
                        or Events_code_fix[i + 1, 0] == 16
                        or Events_code_fix[i + 1, 0] == 17):
                    pass
                else:
                    print('Warning, miss start at i = ' + str(i))
                    Warnings = Warnings + 1
                    Warnings_code = np.append(Warnings_code, 42)
                    Warnings_pos = np.append(Warnings_pos, i)
            if (Events_code_fix[i, 0] == 21 or Events_code_fix[i, 0] == 22
                    or Events_code_fix[i, 0] == 23):
                if Events_code_fix[i + 1, 0] == 42:
                    pass
                else:
                    print('Warning, miss start at i = ' + str(i))
                    Warnings = Warnings + 1
                    Warnings_code = np.append(Warnings_code, 42)
                    Warnings_pos = np.append(Warnings_pos, i)
            if (Events_code_fix[i, 0] == 61 or Events_code_fix[i, 0] == 62
                    or Events_code_fix[i, 0] == 63
                    or Events_code_fix[i, 0] == 64):
                if (Events_code_fix[i + 1, 0] == 42
                        or Events_code_fix[i + 1, 0] == 16):
                    pass
                else:
                    print('Warning, miss start at i = ' + str(i))
                    Warnings = Warnings + 1
                    Warnings_code = np.append(Warnings_code, 42)
                    Warnings_pos = np.append(Warnings_pos, i)
            # Check useful-interval code 44
            if (Events_code_fix[i, 0] == 31 or Events_code_fix[i, 0] == 32
                    or Events_code_fix[i, 0] == 33
                    or Events_code_fix[i, 0] == 34):
                if Events_code_fix[i + 1, 0] == 44:
                    pass
                elif Events_code_fix[i + 1, 0] == 45:
                    print('Warning, miss Useful interval at i = ' + str(i))
                    Warnings = Warnings + 1
                    Warnings_code = np.append(Warnings_code, 44)
                    Warnings_pos = np.append(Warnings_pos, i)
            # Check concentration-interval code 45
            if Events_code_fix[i, 0] == 44:
                if Events_code_fix[i + 1, 0] == 45:
                    pass
                elif Events_code_fix[i + 1, 0] == 46:
                    print('Warning, miss Concentration interval at i = '
                          + str(i))
                    Warnings = Warnings + 1
                    Warnings_code = np.append(Warnings_code, 45)
                    Warnings_pos = np.append(Warnings_pos, i)
            # Check rest-interval code 46
            if Events_code_fix[i, 0] == 45:
                if Events_code_fix[i + 1, 0] == 46:
                    pass
                elif Events_code_fix[i + 1, 0] == 42:
                    print('Warning, miss Rest interval at i = ' + str(i))
                    Warnings = Warnings + 1
                    Warnings_code = np.append(Warnings_code, 46)
                    Warnings_pos = np.append(Warnings_pos, i)

        # correcting the events
        if Warnings == 0:
            print('Tags OK')

    # CHECK FIXED EVENTS
    # Initialize vars
    Events_uniques_fix = np.unique(Events_code_fix[:, 0])
    Event_count_fix = np.zeros([len(Events_uniques_fix), 2], dtype=int)
    Event_count_fix[:, 0] = Events_uniques_fix

    # Count the events
    a = 0
    for i in Events_uniques_fix:
        Event_count_fix[a, 1] = len(
            np.extract(Events_code_fix[:, 0] == Events_uniques_fix[a],
                       Events_code_fix[:, 0]))
        a = a + 1

    # SANITY CHECK AFTER FIXING EVENTS =====================================
    # Check if the beginning, end, baseline start and end are OK
    if (Event_count_fix[Event_count_fix[:, 0] == 11, 1] == 1
            and Event_count_fix[Event_count_fix[:, 0] == 12, 1] == 1
            and Event_count_fix[Event_count_fix[:, 0] == 13, 1] == 1
            and Event_count_fix[Event_count_fix[:, 0] == 14, 1] == 1):
        print('Start OK')
    else:
        print('Warning Start')

    # Check if blocks are OK
    if Event_count_fix[Event_count_fix[:, 0] == 15, 1] \
            == Event_count_fix[Event_count_fix[:, 0] == 16, 1] \
            == Event_count_fix[Event_count_fix[:, 0] == 51, 1] + 1:
        print('Blocks OK')
    else:
        print('Warning Blocks')

    # Check if tags are OK
    if Event_count_fix[Event_count_fix[:, 0] == 31, 1] \
            == Event_count_fix[Event_count_fix[:, 0] == 32, 1] \
            == Event_count_fix[Event_count_fix[:, 0] == 33, 1] \
            == Event_count_fix[Event_count_fix[:, 0] == 34, 1]:
        print('Tags OK')
    else:
        print('Warning Tags')

    # Check if marks are OK
    if Event_count_fix[Event_count_fix[:, 0] == 42, 1] \
            == Event_count_fix[Event_count_fix[:, 0] == 44, 1] \
            == Event_count_fix[Event_count_fix[:, 0] == 45, 1] \
            == Event_count_fix[Event_count_fix[:, 0] == 46, 1]:
        print('Marks OK')
    else:
        print('Warning Marks')

    return Corrected_events
    high_freq,
    method="fir",
    phase="minimum",
    n_jobs=-1,
    picks=filter_picks
)
raw, events = raw.copy().resample(
    250,
    npad="auto",
    events=events,
    n_jobs=-1,
)

# ANNOTATIONS TO EXCLUDE JOYSTICK PARTS FROM ICA FITTING
onsets_p2 = mne.pick_events(events, include=list(np.arange(10, 18)))
annot_onset = ((onsets_p2[:, 0] - raw.first_samp) / raw.info["sfreq"]) - 2
duration = np.array([4.0] * annot_onset.shape[0])
description = np.array(["bad_joystick_movement"] * annot_onset.shape[0])
annotations = mne.Annotations(annot_onset, duration, description)
raw.set_annotations(annotations)

# ICA
n_components = 50
method = "fastica"
max_iter = 10000
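The ``bad_joystick_movement`` label matters because MNE treats annotations whose description starts with ``bad`` as rejectable, so an ICA fit can skip those spans. A sketch of how the fit might use them, assuming the ``raw`` object and ICA settings defined above (the actual fitting code is not part of this excerpt):

ica = mne.preprocessing.ICA(n_components=n_components, method=method,
                            max_iter=max_iter)
ica.fit(raw, reject_by_annotation=True)  # skips the bad_* spans marked above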
def _get_single_subject_data(self, dataset, subject_id, verbose=False):
    """Return data in microvolts."""
    if not self.is_valid(dataset):
        raise TypeError(
            "Dataset {:s} is not valid for the current paradigm. "
            "Check your events and channels settings".format(
                dataset.dataset_code))

    # data = dataset.get_data(subjects)

    # events, interval checking
    used_events, used_intervals = self._map_events_intervals(dataset)

    Xs = {}
    ys = {}
    metas = {}

    data = dataset.get_data([subject_id])

    for subject, sessions in data.items():
        for session, runs in sessions.items():
            for run, raw in runs.items():
                # do raw hook, either self-implemented or dataset-inherited
                caches = {}
                if self._raw_hook:
                    raw, caches = self._raw_hook(raw, caches)
                elif hasattr(dataset, 'raw_hook'):
                    raw, caches = dataset.raw_hook(raw, caches)

                # pick selected channels by order
                channels = dataset.channels if self.select_channels is None \
                    else self.select_channels
                # picks = mne.pick_channels(raw.ch_names, channels,
                #                           ordered=True)
                picks = pick_channels(raw.ch_names, channels, ordered=True)

                # find available events, first check stim_channels then
                # annotations
                stim_channels = mne.utils._get_stim_channel(
                    None, raw.info, raise_error=False)
                if len(stim_channels) > 0:
                    events = mne.find_events(raw, shortest_event=0,
                                             initial_event=True)
                else:
                    # convert event_id to its number type instead of the
                    # default auto-renaming in 0.19.2
                    events, _ = mne.events_from_annotations(
                        raw, event_id=(lambda x: int(x)))

                for event_name in used_events.keys():
                    # mne.pick_events returns any matching events in include;
                    # it only raises a RuntimeError when nothing is found,
                    # then we just skip this event
                    try:
                        selected_events = mne.pick_events(
                            events, include=used_events[event_name])
                    except RuntimeError:
                        continue

                    # transform Raw to Epochs
                    epochs = mne.Epochs(
                        raw,
                        selected_events,
                        event_id={event_name: used_events[event_name]},
                        event_repeated='drop',
                        tmin=used_intervals[event_name][0],
                        tmax=used_intervals[event_name][1]
                        - 1. / raw.info['sfreq'],
                        picks=picks,
                        proj=False,
                        baseline=None,
                        preload=True)

                    # skip invalid time intervals
                    if len(epochs) == 0:
                        continue

                    # do epochs hook
                    if self._epochs_hook:
                        epochs, caches = self._epochs_hook(epochs, caches)
                    elif hasattr(dataset, 'epochs_hook'):
                        epochs, caches = dataset.epochs_hook(epochs, caches)

                    # FIXME: is this resample reasonable?
                    if self.srate:
                        # as MNE suggests, decimate after extracting epochs;
                        # low-pass the raw object in raw_hook to prevent
                        # aliasing problems
                        epochs = epochs.resample(self.srate)
                        # epochs = epochs.decimate(dataset.srate//self.srate)

                    # retrieve X, y and meta
                    X = epochs.get_data() * 1e6  # microvolts by default
                    y = epochs.events[:, -1]
                    trial_ids = np.argwhere(
                        events[:, -1] == list(
                            epochs.event_id.values())[0]).reshape((-1))
                    meta = pd.DataFrame({
                        "subject": [subject] * len(epochs),
                        "session": [session] * len(epochs),
                        "run": [run] * len(epochs),
                        "event": [event_name] * len(epochs),
                        "trial_id": trial_ids,
                        "dataset": [dataset.dataset_code] * len(epochs)
                    })

                    # do data hook
                    if self._data_hook:
                        X, y, meta, caches = self._data_hook(X, y, meta,
                                                             caches)
                    elif hasattr(dataset, 'data_hook'):
                        X, y, meta, caches = dataset.data_hook(X, y, meta,
                                                               caches)

                    # collecting data
                    pre_X = Xs.get(event_name)
                    if pre_X is not None:
                        Xs[event_name] = np.concatenate((pre_X, X), axis=0)
                    else:
                        Xs[event_name] = X

                    pre_y = ys.get(event_name)
                    if pre_y is not None:
                        ys[event_name] = np.concatenate((pre_y, y), axis=0)
                    else:
                        ys[event_name] = y

                    pre_meta = metas.get(event_name)
                    if pre_meta is not None:
                        metas[event_name] = pd.concat(
                            (pre_meta, meta), axis=0, ignore_index=True)
                    else:
                        metas[event_name] = meta

    return Xs, ys, metas
# be dropped as the browser window is closed.
#
# It is possible to plot event markers on epoched data by passing the
# ``events`` keyword to the epochs plotter. The events are plotted as
# vertical lines and they follow the same coloring scheme as
# :func:`mne.viz.plot_events`. The events plotter gives you all the events
# with a rough idea of the timing. Since the colors are the same, the event
# plotter can also function as a legend for the epochs plotter events. It is
# also possible to pass your own colors via the ``event_colors`` keyword.
# Here we can plot the reaction times between seeing the smiley face and the
# button press (event 32).
#
# When events are passed, the epoch numbering at the bottom is switched off
# by default to avoid overlaps. You can turn it back on via the settings
# dialog by pressing the `o` key. You should check out `help` at the lower
# left corner of the window for more information about the interactive
# features.

events = mne.pick_events(events, include=[5, 32])
mne.viz.plot_events(events)
epochs['smiley'].plot(events=events)

###############################################################################
# To plot individual channels as an image, where you see all the epochs at
# one glance, you can use the function :func:`mne.Epochs.plot_image`. It
# shows the amplitude of the signal over all the epochs plus an average
# (evoked response) of the activation. We explicitly set the interactive
# colorbar on (it is also on by default for plotting functions with a
# colorbar, except the topo plots). In interactive mode you can scale and
# change the colormap with the mouse scroll and up/down arrow keys. You can
# also drag the colorbar with the left/right mouse button. Hitting the space
# bar resets the scale.

epochs.plot_image(278, cmap='interactive', sigma=1., vmin=-250, vmax=250)

###############################################################################
    for i in range(30):
        ch = {a[i]: b[i]}
        newch.update(ch)
    raw.rename_channels(newch)
    return raw


renamer()

#### PREPROCESSING
montage = mne.channels.make_standard_montage('standard_1020')
raw.set_montage(montage)
raw.filter(0.1, 30, fir_design='firwin', skip_by_annotation='edge')
events, ids = mne.events_from_annotations(raw)
events = mne.pick_events(events, exclude=[99999])
del ids['New Segment/']
ids['vibro'] = ids.pop('1/Key Space')
ids['rest'] = ids.pop('2/cond2')
epochs = mne.Epochs(raw, events, event_id=ids, tmin=0.0, tmax=5.0,
                    baseline=(0, 0), preload=True)

##### SPECTRAL ANALYSIS
psds_rst_full, ffreqs = mne.time_frequency.psd_welch(epochs['rest'], fmin=1,
def extract_delays(raw_fname, stim_chan='STI101', misc_chan='MISC001',
                   trig_codes=None, baseline=(-0.100, 0), l_freq=None,
                   h_freq=None, plot_figures=True, crop_plot_time=None):
    """Estimate onset delay of analogue (misc) input relative to trigger.

    Parameters
    ----------
    raw_fname : str
        Raw file name.
    stim_chan : str
        Default stim channel is 'STI101'.
    misc_chan : str
        Default misc channel is 'MISC001' (default, usually visual).
    trig_codes : int | list of int
        Trigger values to compare the analogue signal to.
    baseline : tuple of int
        Pre- and post-trigger time to calculate trigger limits from.
        Defaults to (-0.100, 0.).
    l_freq : float | None
        Low cut-off frequency in Hz. Uses mne.io.Raw.filter.
    h_freq : float | None
        High cut-off frequency in Hz. Uses mne.io.Raw.filter.
    plot_figures : bool
        Plot histogram and "ERP image" of delays (default: True).
    crop_plot_time : tuple, optional
        A 2-tuple with (tmin, tmax) being the limits to plot in the figure.
    """
    raw = Raw(raw_fname, preload=True)
    if l_freq is not None or h_freq is not None:
        picks = pick_types(raw.info, misc=True)
        raw.filter(l_freq, h_freq, picks=picks)
    if trig_codes is not None:
        include_trigs = trig_codes  # do some checking here!
    events = pick_events(find_events(raw, stim_channel=stim_chan,
                                     min_duration=0.002),
                         include=include_trigs)
    delays = np.zeros(events.shape[0])
    pick = pick_channels(raw.info['ch_names'], include=[misc_chan])
    ana_data = np.sqrt(raw._data[pick, :].squeeze() ** 2)  # rectify!

    tmin, tmax = baseline
    offlevel, onlimit = _find_analogue_trigger_limit_sd(raw, events, pick,
                                                        tmin=tmin, tmax=tmax)

    for row, unpack_me in enumerate(events):
        ind, before, after = unpack_me
        raw_ind = ind - raw.first_samp  # really indices into raw!
        anatrig_ind = _find_next_analogue_trigger(ana_data, raw_ind,
                                                  offlevel, onlimit,
                                                  maxdelay_samps=1000)
        delays[row] = anatrig_ind / raw.info['sfreq'] * 1.e3

    # epochs are needed for the amplitude stats below, even without plotting
    epochs = Epochs(raw, events, preload=True)
    if plot_figures:
        import matplotlib.pyplot as plt
        plt.ion()
        fig, axs = plt.subplots(1, 1)
        axs.hist(delays)
        axs.set_title('Delay histogram (ms)')
        imgfig, _ = plt.subplots(1, 2)
        if crop_plot_time is not None:
            epochs.crop(*crop_plot_time)
        epochs.plot_image(pick, fig=imgfig)
        # mnefig[0].get_axes()[1].set_title('')

    stats = dict()
    stats['mean'] = np.mean(delays)
    stats['std'] = np.std(delays)
    stats['median'] = np.median(delays)
    stats['q10'] = np.percentile(delays, 10.)
    stats['q90'] = np.percentile(delays, 90.)
    stats['max_amp'] = np.max(epochs._data[:, pick, :])  # over epochs & times
    stats['min_amp'] = np.min(epochs._data[:, pick, :])  # over epochs & times

    return (delays, stats)
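A hypothetical call matching the docstring above; the file name and trigger codes are placeholders:

delays, stats = extract_delays('sub01_raw.fif', stim_chan='STI101',
                               misc_chan='MISC001', trig_codes=[1, 2],
                               plot_figures=False)
print('median delay: %.1f ms' % stats['median'])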
           'F24', 'F25', 'F26', 'F27', 'F28', 'F29', 'F30', 'F31', 'F32',
           'G1', 'G2', 'G3', 'G4', 'G5', 'G6', 'G7', 'G8', 'G9', 'G10',
           'G11', 'G12', 'G13', 'G14', 'G15', 'G16', 'G17', 'G18', 'G19',
           'G20', 'G21', 'G22', 'G23', 'G24', 'G25', 'G26', 'G27', 'G28',
           'G29', 'G30', 'G31', 'G32']

stimchannel = -1
data = mne.io.read_raw_edf('Subject01.bdf', preload=True, stim_channel=-1,
                           eog=range(64, 73))
locs = mne.channels.read_montage('biosemi64')
data.pick_types(eeg=True, meg=False, stim=True)
locs.ch_names = data.ch_names[0:64]
data.set_montage(locs)
data.plot_psd(fmin=1, fmax=100, average=False, n_overlap=50)

# to find all of the events in the RAW item data given that the stim_channel
# is the last channel (which it is in all of our data)
mne.find_events(data, stim_channel=data.ch_names[-1])

# find all events as above and only include those with id == 1
# include can be a list
mne.pick_events(mne.find_events(data, stim_channel=data.ch_names[-1]),
                include=1)
def test_read_write_epochs():
    """Test epochs from raw files with IO as fif file."""
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0))
    evoked = epochs.average()
    data = epochs.get_data()
    epochs_no_id = Epochs(raw, pick_events(events, include=event_id), None,
                          tmin, tmax, picks=picks, baseline=(None, 0))
    assert_array_equal(data, epochs_no_id.get_data())

    eog_picks = fiff.pick_types(raw.info, meg=False, eeg=False, stim=False,
                                eog=True, exclude='bads')
    eog_ch_names = [raw.ch_names[k] for k in eog_picks]
    epochs.drop_channels(eog_ch_names)
    assert_true(len(epochs.info['chs']) == len(epochs.ch_names)
                == epochs.get_data().shape[1])
    data_no_eog = epochs.get_data()
    assert_true(data.shape[1] == (data_no_eog.shape[1] + len(eog_picks)))

    # test decim kwarg
    with warnings.catch_warnings(record=True) as w:
        epochs_dec = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                            baseline=(None, 0), decim=4)
        assert_equal(len(w), 1)

    data_dec = epochs_dec.get_data()
    assert_array_equal(data[:, :, epochs_dec._decim_idx], data_dec)

    evoked_dec = epochs_dec.average()
    assert_array_equal(evoked.data[:, epochs_dec._decim_idx],
                       evoked_dec.data)

    n = evoked.data.shape[1]
    n_dec = evoked_dec.data.shape[1]
    n_dec_min = n // 4
    assert_true(n_dec_min <= n_dec <= n_dec_min + 1)
    assert_true(evoked_dec.info['sfreq'] == evoked.info['sfreq'] / 4)

    # test IO
    epochs.save(op.join(tempdir, 'test-epo.fif'))
    epochs_read = read_epochs(op.join(tempdir, 'test-epo.fif'))

    assert_array_almost_equal(epochs_read.get_data(), epochs.get_data())
    assert_array_equal(epochs_read.times, epochs.times)
    assert_array_almost_equal(epochs_read.average().data, evoked.data)
    assert_equal(epochs_read.proj, epochs.proj)
    bmin, bmax = epochs.baseline
    if bmin is None:
        bmin = epochs.times[0]
    if bmax is None:
        bmax = epochs.times[-1]
    baseline = (bmin, bmax)
    assert_array_almost_equal(epochs_read.baseline, baseline)
    assert_array_almost_equal(epochs_read.tmin, epochs.tmin, 2)
    assert_array_almost_equal(epochs_read.tmax, epochs.tmax, 2)
    assert_equal(epochs_read.event_id, epochs.event_id)

    epochs.event_id.pop('1')
    epochs.event_id.update({'a:a': 1})  # test allow for ':' in key
    epochs.save(op.join(tempdir, 'foo-epo.fif'))
    epochs_read2 = read_epochs(op.join(tempdir, 'foo-epo.fif'))
    assert_equal(epochs_read2.event_id, epochs.event_id)

    # add reject here so some of the epochs get dropped
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), reject=reject)
    epochs.save(op.join(tempdir, 'test-epo.fif'))
    # ensure bad events are not saved
    epochs_read3 = read_epochs(op.join(tempdir, 'test-epo.fif'))
    assert_array_equal(epochs_read3.events, epochs.events)
    data = epochs.get_data()
    assert_true(epochs_read3.events.shape[0] == data.shape[0])

    # test copying loaded one (raw property)
    epochs_read4 = epochs_read3.copy()
    assert_array_almost_equal(epochs_read4.get_data(), data)

    # test equalizing loaded one (drop_log property)
    epochs_read4.equalize_event_counts(epochs.event_id)

    epochs.drop_epochs([1, 2], reason='can we recover orig ID?')
    epochs.save('test-epo.fif')
    epochs_read5 = read_epochs('test-epo.fif')
    assert_array_equal(epochs_read5.selection, epochs.selection)
    assert_array_equal(epochs_read5.drop_log, epochs.drop_log)

    # Test that one can drop channels on read file
    epochs_read5.drop_channels(epochs_read5.ch_names[:1])
import mne
import numpy as np
import pandas as pd

raw_path = "/cubric/scratch/c1557187/act_mis/MEG/0002/new_v1/80-000-raw.fif"
events_path = ("/cubric/scratch/c1557187/act_mis/MEG/0002/new_v1/"
               "events-000-eve.fif")
beh_path = "/cubric/scratch/c1557187/act_mis/BEH/0002/ses1_0002_1558538484.csv"

raw = mne.io.read_raw_fif(raw_path)
events = mne.read_events(events_path)
beh = pd.read_csv(beh_path)
beh = beh.loc[(beh.obs_dir_mod != 0)]

onsets = mne.pick_events(events, include=[30, 40])
ends = mne.pick_events(events, include=[60, 70])
duration = ends[:, 0] - onsets[:, 0]

all_epochs = []
for ix, event in enumerate(onsets):
    print(ix, event[2], beh.obs_dir_mod.values[ix])
    epoch = mne.Epochs(raw, events=[event], baseline=None, preload=True,
                       tmin=-0.5,
                       tmax=duration[ix] / raw.info["sfreq"] + 1.1,
                       detrend=1)
    data = epoch.get_data()[0]
    del_ints = np.arange(500, duration[ix] + 100)
    data = np.delete(data, del_ints, axis=1)
    data = data[:, :776]
def process_raw(self, raw, dataset, return_epochs=False):  # noqa: C901
    """
    Process one raw data file.

    This function applies the preprocessing and eventual epoching on the
    individual run, and returns the data, labels and a dataframe with
    metadata. The metadata is a dataframe with as many rows as the length
    of the data and labels.

    Parameters
    ----------
    raw : mne.Raw instance
        the raw EEG data.
    dataset : dataset instance
        The dataset corresponding to the raw file. Mainly used to access
        dataset-specific information.
    return_epochs : boolean
        This flag specifies whether to return only the data array or the
        complete processed mne.Epochs.

    Returns
    -------
    X : Union[np.ndarray, mne.Epochs]
        the data that will be used as features for the model.
        Note: if return_epochs=True, this is mne.Epochs;
        if return_epochs=False, this is np.ndarray.
    labels : np.ndarray
        the labels for training / evaluating the model
    metadata : pd.DataFrame
        A dataframe containing the metadata
    """
    # get events id
    event_id = self.used_events(dataset)

    # find the events, first check stim_channels then annotations
    stim_channels = mne.utils._get_stim_channel(None, raw.info,
                                                raise_error=False)
    if len(stim_channels) > 0:
        events = mne.find_events(raw, shortest_event=0, verbose=False)
    else:
        try:
            events, _ = mne.events_from_annotations(raw, event_id=event_id,
                                                    verbose=False)
        except ValueError:
            log.warning("No matching annotations in {}".format(
                raw.filenames))
            return

    # picks channels
    if self.channels is None:
        picks = mne.pick_types(raw.info, eeg=True, stim=False)
    else:
        picks = mne.pick_channels(raw.info["ch_names"],
                                  include=self.channels, ordered=True)

    # pick events, based on event_id
    try:
        events = mne.pick_events(events, include=list(event_id.values()))
    except RuntimeError:
        # skip raw if no event found
        return

    # get interval
    tmin = self.tmin + dataset.interval[0]
    if self.tmax is None:
        tmax = dataset.interval[1]
    else:
        tmax = self.tmax + dataset.interval[0]

    X = []
    for bandpass in self.filters:
        fmin, fmax = bandpass
        # filter data
        raw_f = raw.copy().filter(fmin, fmax, method="iir", picks=picks,
                                  verbose=False)
        # epoch data
        baseline = self.baseline
        if baseline is not None:
            baseline = (
                self.baseline[0] + dataset.interval[0],
                self.baseline[1] + dataset.interval[0],
            )
            bmin = baseline[0] if baseline[0] < tmin else tmin
            bmax = baseline[1] if baseline[1] > tmax else tmax
        else:
            bmin = tmin
            bmax = tmax
        epochs = mne.Epochs(
            raw_f,
            events,
            event_id=event_id,
            tmin=bmin,
            tmax=bmax,
            proj=False,
            baseline=baseline,
            preload=True,
            verbose=False,
            picks=picks,
            event_repeated="drop",
            on_missing="ignore",
        )
        if bmin < tmin or bmax > tmax:
            epochs.crop(tmin=tmin, tmax=tmax)
        if self.resample is not None:
            epochs = epochs.resample(self.resample)
        # rescale to work with uV
        if return_epochs:
            X.append(epochs)
        else:
            X.append(dataset.unit_factor * epochs.get_data())

    inv_events = {k: v for v, k in event_id.items()}
    labels = np.array([inv_events[e] for e in epochs.events[:, -1]])

    if return_epochs:
        X = mne.concatenate_epochs(X)
    elif len(self.filters) == 1:
        # if only one band, return a 3D array
        X = X[0]
    else:
        # otherwise return a 4D
        X = np.array(X).transpose((1, 2, 3, 0))

    metadata = pd.DataFrame(index=range(len(labels)))
    return X, labels, metadata
def load_data(dataset="somato", n_splits=10, sfreq=None, epoch=None,
              filter_params=[2., None], return_array=True, n_jobs=1):
    """Load and prepare the somato dataset for multiCSC.

    Parameters
    ----------
    dataset : str in {'somato', 'sample'}
        Dataset to load.
    n_splits : int
        Split the signal into n_splits signals of the same length before
        returning it. If epoch is provided, the signal is instead split
        according to the epochs and this option is not followed.
    sfreq : float
        Sampling frequency of the signal. The data are resampled to match it.
    epoch : tuple or None
        If set to a tuple, extract epochs from the raw data, using
        t_min=epoch[0] and t_max=epoch[1]. Else, use the raw signal, divided
        into n_splits chunks.
    filter_params : tuple of length 2
        Boundaries of filtering, e.g. (2, None), (30, 40), (None, 40).
    return_array : boolean
        If True, return a NumPy array instead of mne objects.
    n_jobs : int
        Number of jobs that can be used for preparing (filtering) the data.

    Returns
    -------
    X : array, shape (n_splits, n_channels, n_times)
        The loaded dataset.
    info : dict
        MNE dictionary of information about recording settings.
    """
    pick_types_epoch = dict(meg='grad', eeg=False, eog=True, stim=False)
    pick_types_final = dict(meg='grad', eeg=False, eog=False, stim=False)

    if dataset == 'somato':
        data_path = mne.datasets.somato.data_path()
        subjects_dir = None
        file_name = join(data_path, 'sub-01', 'meg',
                         'sub-01_task-somato_meg.fif')
        raw = mne.io.read_raw_fif(file_name, preload=True)
        raw.notch_filter(np.arange(50, 101, 50), n_jobs=n_jobs)
        event_id = 1

        # Dipole fit information
        cov = None  # see below
        file_trans = None
        file_bem = None

    elif dataset == 'sample':
        data_path = mne.datasets.sample.data_path()
        subjects_dir = join(data_path, "subjects")
        data_dir = join(data_path, 'MEG', 'sample')
        file_name = join(data_dir, 'sample_audvis_raw.fif')
        raw = mne.io.read_raw_fif(file_name, preload=True)
        raw.notch_filter(np.arange(60, 181, 60), n_jobs=n_jobs)
        event_id = [1, 2, 3, 4]

        # Dipole fit information
        cov = join(data_dir, 'sample_audvis-cov.fif')
        file_trans = join(data_dir, 'sample_audvis_raw-trans.fif')
        file_bem = join(subjects_dir, 'sample', 'bem',
                        'sample-5120-bem-sol.fif')

    else:
        raise ValueError('Unknown parameter dataset=%s.' % (dataset, ))

    raw.filter(*filter_params, n_jobs=n_jobs)

    baseline = (None, 0)
    events = mne.find_events(raw, stim_channel='STI 014')
    events = mne.pick_events(events, include=event_id)

    # compute the covariance matrix for somato
    if dataset == "somato":
        picks_cov = mne.pick_types(raw.info, **pick_types_epoch)
        epochs_cov = mne.Epochs(raw, events, event_id, tmin=-4, tmax=0,
                                picks=picks_cov, baseline=baseline,
                                reject=dict(grad=4000e-13, eog=350e-6),
                                preload=True)
        epochs_cov.pick_types(**pick_types_final)
        cov = mne.compute_covariance(epochs_cov)

    if epoch:
        t_min, t_max = epoch
        picks = mne.pick_types(raw.info, **pick_types_epoch)
        epochs = mne.Epochs(raw, events, event_id, t_min, t_max,
                            picks=picks, baseline=baseline,
                            reject=dict(grad=4000e-13, eog=350e-6),
                            preload=True)
        epochs.pick_types(**pick_types_final)
        info = epochs.info
        if sfreq is not None:
            epochs = epochs.resample(sfreq, npad='auto', n_jobs=n_jobs)
        if return_array:
            X = epochs.get_data()
    else:
        events[:, 0] -= raw.first_samp
        raw.pick_types(**pick_types_final)
        info = raw.info
        if sfreq is not None:
            raw, events = raw.resample(sfreq, events=events, npad='auto',
                                       n_jobs=n_jobs)
        if return_array:
            X = raw.get_data()
            n_channels, n_times = X.shape
            n_times = n_times // n_splits
            X = X[:, :n_splits * n_times]
            X = X.reshape(n_channels, n_splits, n_times).swapaxes(0, 1)

    # Deep copy before modifying info to avoid issues when saving EvokedArray
    info = deepcopy(info)
    event_info = dict(event_id=event_id, events=events, subject=dataset,
                      subjects_dir=subjects_dir, cov=cov, file_bem=file_bem,
                      file_trans=file_trans)
    info['temp'] = event_info

    if return_array:
        n_splits, n_channels, n_times = X.shape
        X *= tukey(n_times, alpha=0.1)[None, None, :]
        X /= np.std(X)
        return X, info
    elif epoch:
        return epochs, info
    else:
        return raw, info
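A hypothetical call matching the docstring above (it downloads the somato dataset on first use, so the exact channel and time counts depend on the data):

X, info = load_data(dataset='somato', n_splits=10, sfreq=150.)
print(X.shape)  # (n_splits, n_channels, n_times)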
def test_read_write_epochs():
    """Test epochs from raw files with IO as fif file."""
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0))
    evoked = epochs.average()
    data = epochs.get_data()
    epochs_no_id = Epochs(raw, pick_events(events, include=event_id),
                          None, tmin, tmax, picks=picks, baseline=(None, 0))
    assert_array_equal(data, epochs_no_id.get_data())

    eog_picks = pick_types(raw.info, meg=False, eeg=False, stim=False,
                           eog=True, exclude="bads")
    eog_ch_names = [raw.ch_names[k] for k in eog_picks]
    epochs.drop_channels(eog_ch_names)
    assert_true(len(epochs.info["chs"]) == len(epochs.ch_names)
                == epochs.get_data().shape[1])
    data_no_eog = epochs.get_data()
    assert_true(data.shape[1] == (data_no_eog.shape[1] + len(eog_picks)))

    # test decim kwarg
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        epochs_dec = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                            baseline=(None, 0), decim=4)
        assert_equal(len(w), 1)

    data_dec = epochs_dec.get_data()
    assert_array_equal(data[:, :, epochs_dec._decim_idx], data_dec)

    evoked_dec = epochs_dec.average()
    assert_array_equal(evoked.data[:, epochs_dec._decim_idx], evoked_dec.data)

    n = evoked.data.shape[1]
    n_dec = evoked_dec.data.shape[1]
    n_dec_min = n // 4
    assert_true(n_dec_min <= n_dec <= n_dec_min + 1)
    assert_true(evoked_dec.info["sfreq"] == evoked.info["sfreq"] / 4)

    # test IO
    epochs.save(op.join(tempdir, "test-epo.fif"))
    epochs_read = read_epochs(op.join(tempdir, "test-epo.fif"))

    assert_array_almost_equal(epochs_read.get_data(), epochs.get_data())
    assert_array_equal(epochs_read.times, epochs.times)
    assert_array_almost_equal(epochs_read.average().data, evoked.data)
    assert_equal(epochs_read.proj, epochs.proj)
    bmin, bmax = epochs.baseline
    if bmin is None:
        bmin = epochs.times[0]
    if bmax is None:
        bmax = epochs.times[-1]
    baseline = (bmin, bmax)
    assert_array_almost_equal(epochs_read.baseline, baseline)
    assert_array_almost_equal(epochs_read.tmin, epochs.tmin, 2)
    assert_array_almost_equal(epochs_read.tmax, epochs.tmax, 2)
    assert_equal(epochs_read.event_id, epochs.event_id)

    epochs.event_id.pop("1")
    epochs.event_id.update({"a:a": 1})  # test allow for ':' in key
    epochs.save(op.join(tempdir, "foo-epo.fif"))
    epochs_read2 = read_epochs(op.join(tempdir, "foo-epo.fif"))
    assert_equal(epochs_read2.event_id, epochs.event_id)

    # add reject here so some of the epochs get dropped
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), reject=reject)
    epochs.save(op.join(tempdir, "test-epo.fif"))
    # ensure bad events are not saved
    epochs_read3 = read_epochs(op.join(tempdir, "test-epo.fif"))
    assert_array_equal(epochs_read3.events, epochs.events)
    data = epochs.get_data()
    assert_true(epochs_read3.events.shape[0] == data.shape[0])

    # test copying loaded one (raw property)
    epochs_read4 = epochs_read3.copy()
    assert_array_almost_equal(epochs_read4.get_data(), data)
    # test equalizing loaded one (drop_log property)
    epochs_read4.equalize_event_counts(epochs.event_id)

    epochs.drop_epochs([1, 2], reason="can we recover orig ID?")
    epochs.save(op.join(tempdir, "test-epo.fif"))
    epochs_read5 = read_epochs(op.join(tempdir, "test-epo.fif"))
    assert_array_equal(epochs_read5.selection, epochs.selection)
    assert_array_equal(epochs_read5.drop_log, epochs.drop_log)

    # Test that one can drop channels on read file
    epochs_read5.drop_channels(epochs_read5.ch_names[:1])

    # test warnings on bad filenames
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        epochs_badname = op.join(tempdir, "test-bad-name.fif.gz")
        epochs.save(epochs_badname)
        read_epochs(epochs_badname)
    assert_true(len(w) == 2)
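# A minimal round-trip sketch of what the test above verifies (the path is
# hypothetical, and the ``overwrite`` argument assumes a recent MNE version):
# epochs written with the "-epo.fif" suffix read back with matching data.
import mne
import numpy as np

epochs.save("/tmp/sample-epo.fif", overwrite=True)
epochs_read = mne.read_epochs("/tmp/sample-epo.fif")
# data is stored as float32 on disk, so compare approximately
np.testing.assert_allclose(epochs_read.get_data(), epochs.get_data(),
                           rtol=1e-5)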
def process_raw(self, raw, dataset):
    # find the events
    events = mne.find_events(raw, shortest_event=0, verbose=False)
    channels = () if self.channels is None else self.channels

    # picks channels
    picks = mne.pick_types(raw.info, eeg=True, stim=False,
                           include=channels)

    # get event id
    if self.events is None:
        event_id = dataset.event_id
    else:
        event_id = {ev: dataset.event_id[ev] for ev in self.events}

    # pick events, based on event_id
    try:
        events = mne.pick_events(events, include=list(event_id.values()))
    except RuntimeError:
        # skip raw if no event found
        return

    # get interval
    if self.interval is None:
        tmin, tmax = dataset.interval
    else:
        tmin, tmax = self.interval

    if self.resample is not None:
        # resample raw and events together so event samples stay aligned
        raw, events = raw.copy().resample(self.resample, events=events)

    X = []
    for bandpass in self.filters:
        fmin, fmax = bandpass
        # filter data
        raw_f = raw.copy().filter(fmin, fmax, method='iir', picks=picks,
                                  verbose=False)
        # epoch data
        epochs = mne.Epochs(raw_f, events, event_id=event_id, tmin=tmin,
                            tmax=tmax, proj=False, baseline=None,
                            preload=True, verbose=False, picks=picks)
        X.append(epochs.get_data())

    inv_events = {k: v for v, k in event_id.items()}
    labels = np.array([inv_events[e] for e in epochs.events[:, -1]])

    # if only one band, return a 3D array, otherwise return a 4D
    if len(self.filters) == 1:
        X = X[0]
    else:
        X = np.array(X).transpose((1, 2, 3, 0))

    metadata = pd.DataFrame(index=range(len(labels)))
    return X, labels, metadata
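# Sketch of the filter-bank stacking above with dummy arrays: each band
# contributes an (n_epochs, n_channels, n_times) block, and moving the band
# axis last yields (n_epochs, n_channels, n_times, n_bands):
import numpy as np

bands = [np.zeros((10, 32, 256)) for _ in range(4)]  # 4 dummy bandpass outputs
X = np.array(bands).transpose((1, 2, 3, 0))
print(X.shape)  # (10, 32, 256, 4)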
# signal, and even manually mark bad channels (by clicking the channel name) or
# bad epochs (by clicking the data) for later dropping. Channels marked "bad"
# will be shown in light grey color and will be added to
# ``epochs.info['bads']``; epochs marked as bad will be indicated as ``'USER'``
# in ``epochs.drop_log``.
#
# Here we'll plot only the "catch" trials from the :ref:`sample dataset
# <sample-dataset>`, and pass in our events array so that the button press
# responses also get marked (we'll plot them in red, and plot the "face" events
# defining time zero for each epoch in blue). We also need to pass in
# our ``event_dict`` so that the `~mne.Epochs.plot` method will know what
# we mean by "button" — this is because subsetting the conditions by
# calling ``epochs['face']`` automatically purges the dropped entries from
# ``epochs.event_id``:

catch_trials_and_buttonpresses = mne.pick_events(events, include=[5, 32])
epochs['face'].plot(events=catch_trials_and_buttonpresses,
                    event_id=event_dict,
                    event_color=dict(button='red', face='blue'))

###############################################################################
# To see all sensors at once, we can use butterfly mode and group by selection:

epochs['face'].plot(events=catch_trials_and_buttonpresses,
                    event_id=event_dict,
                    event_color=dict(button='red', face='blue'),
                    group_by='selection', butterfly=True)

###############################################################################
# Plotting projectors from an ``Epochs`` object
# ---------------------------------------------
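#
# The SSP projectors stored alongside the epochs can be shown as topographic
# maps (a brief sketch of this section's likely content; the exact figure
# depends on which projectors are present in your data):

epochs.plot_projs_topomap()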
picks = mne.pick_types(raw.info, meg=meg_type, include=[emg_ch],
                       exclude="bads")

# find seed-EMG and target-MEG channel indices
picks_ch_names = [raw.ch_names[i] for i in picks]
seed = picks_ch_names.index(emg_ch)
targets = np.arange(len(picks))
indices = seed_target_indices(seed, targets)

# prune raw data based on instruction events ev_start and ev_stop
# events = mne.find_events(raw, stim_channel="STI101")
events1 = mne.event.make_fixed_length_events(raw, duration=3)
events2 = mne.event.make_fixed_length_events(raw, start=2, id=2, duration=3)
events = np.concatenate([events1, events2])
events[:, 0] -= raw.first_samp
# epochs = mne.Epochs(Raw, events, event_id=)
start_ev = mne.pick_events(events, include=trig_start)
stop_ev = mne.pick_events(events, include=trig_stop)
if len(start_ev) != len(stop_ev):  # equalize
    print("Different number of start and stop instructions found!")
    start_ev = start_ev[:len(stop_ev)]

# find and combine active periods from raw data
raw.pick_channels(picks_ch_names)
raws = []
for ev in zip(start_ev, stop_ev):
    raws.append(raw.copy()
                   .crop(ev[0][0] / raw.info["sfreq"] + start_gap,
                         ev[1][0] / raw.info["sfreq"] - stop_gap)
                   .get_data())
# raw = raws.pop(0)
# raw.append(raws)

# Define wavelet frequencies and number of cycles
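# A sketch of what the commented-out lines above were presumably after:
# stitching the active periods back together with MNE's public API, by
# cropping to Raw objects first and concatenating (the start/stop arithmetic
# mirrors the loop above):
active = [raw.copy().crop(s[0] / raw.info["sfreq"] + start_gap,
                          e[0] / raw.info["sfreq"] - stop_gap)
          for s, e in zip(start_ev, stop_ev)]
raw_active = mne.concatenate_raws(active)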
# number of events that were found, and the unique integer event IDs present:

mne.find_events(raw, stim_channel='STI 014')

###############################################################################
# .. sidebar:: Including/excluding events
#
#     Just like `~mne.pick_events`, `~mne.read_events` also has ``include``
#     and ``exclude`` parameters.
#
# If some of those events are not of interest, you can easily subselect events
# using :func:`mne.pick_events`, which has parameters ``include`` and
# ``exclude``. For example, in the sample data Event ID 32 corresponds to a
# subject button press, which could be excluded as:

events_no_button = mne.pick_events(events, exclude=32)

###############################################################################
# It is also possible to combine two Event IDs using :func:`mne.merge_events`;
# the following example will combine Event IDs 1, 2 and 3 into a single event
# labelled ``1``:

merged_events = mne.merge_events(events, [1, 2, 3], 1)
print(np.unique(merged_events[:, -1]))

###############################################################################
# Note, however, that merging events is not necessary if you simply want to
# pool trial types for analysis; the next section describes how MNE-Python
# uses *event dictionaries* to map integer Event IDs to more descriptive
# label strings.
#
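###############################################################################
# For example (a sketch previewing that mechanism, using the sample data's
# conventional labels), a dictionary with ``/``-separated tags lets you pool
# conditions at selection time without touching the integer IDs:

event_dict = {'auditory/left': 1, 'auditory/right': 2,
              'visual/left': 3, 'visual/right': 4}
epochs = mne.Epochs(raw, events, event_id=event_dict)
print(epochs['auditory'])  # pools Event IDs 1 and 2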
fig.tight_layout()

###############################################################################
# Display the evoked reconstructed envelope
# -----------------------------------------
#
# The MNE sample data contains data for auditory (event_id=1 and 2) and
# visual stimuli (event_id=3 and 4). We extract the events now so that we can
# later identify the atoms related to different events. Note that the
# convolutional sparse coding method does not need to know the events for
# learning atoms.

event_id = [1, 2, 3, 4]
events = mne.find_events(raw, stim_channel='STI 014')
events = mne.pick_events(events, include=event_id)
events[:, 0] -= raw.first_samp

###############################################################################
# For each atom (columns), and for each event (rows), we compute the envelope
# of the reconstructed signal, align it with respect to the event onsets, and
# take the average. For some atoms, the activations are correlated with the
# events, leading to a large evoked envelope. The gray area corresponds to
# values that are not statistically significant, computed with surrogate
# sampling.

from alphacsc.utils.signal import fast_hilbert
from alphacsc.viz.epoch import plot_evoked_surrogates
from alphacsc.utils.convolution import construct_X_multi

# time window around the events. Note that for the sample dataset, the
# inter-event time is around 0.5 s.
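# A generic sketch of the envelope-and-average step described above, using
# plain NumPy/SciPy rather than the alphacsc helpers (the reconstructed
# signal here is a random stand-in; variable names are illustrative):
import numpy as np
from scipy.signal import hilbert

rng = np.random.default_rng(0)
signal_hat = rng.standard_normal(20000)   # stand-in for a reconstructed signal
envelope = np.abs(hilbert(signal_hat))    # analytic-signal envelope
window = np.arange(-128, 384)             # samples around each event onset
segments = [envelope[o + window] for o in events[:, 0]
            if o + window[0] >= 0 and o + window[-1] < len(envelope)]
evoked_env = np.mean(segments, axis=0)    # average envelope across events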
dss = files.get_folders_files(meg_path)[0]
dss = [i for i in dss if "ds" in i]
dss.sort()

for ds in dss:
    print("INPUT RAW FILE:", ds)
    numero = int(ds.split(".")[0][-2:])
    raw = mne.io.read_raw_ctf(ds, clean_names=True, verbose=False)
    raw_events = mne.find_events(raw, stim_channel="UPPT002",
                                 min_duration=0.002, verbose="DEBUG",
                                 consecutive=True)
    diode_events = mne.pick_events(raw_events, include=[30, 50])
    raw = raw.crop(tmin=raw.times[raw_events[0, 0]] - 0.1,
                   tmax=raw.times[raw_events[-1, 0]] + 0.1)
    raw, events = raw.copy().resample(
        sfreq, npad="auto", events=raw_events, n_jobs=-1,
    )
    f_n = str(numero).zfill(3)  # file number
    raw_path = op.join(sub_path, "{}-{}-raw.fif".format(subject_id, f_n))
    eve_path = op.join(sub_path, "{}-{}-eve.fif".format(subject_id, f_n))
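    # presumably the resampled run and its events are then written out to the
    # paths built above (a sketch using MNE's standard writers):
    raw.save(raw_path, overwrite=True)
    mne.write_events(eve_path, events)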
def process_raw(self, raw, dataset):
    events = mne.find_events(raw, shortest_event=0, verbose=False)
    channels = () if self.channels is None else self.channels

    # picks channels
    picks = mne.pick_types(raw.info, eeg=True, stim=False,
                           include=channels)

    # get events id
    event_id = self.used_events(dataset)

    # pick events, based on event_id
    try:
        events = mne.pick_events(events, include=list(event_id.values()))
    except RuntimeError:
        # skip raw if no event found
        return

    # get interval
    tmin = self.tmin + dataset.interval[0]
    if self.tmax is None:
        tmax = dataset.interval[1]
    else:
        tmax = self.tmax + dataset.interval[0]

    epochs = mne.Epochs(raw, events, event_id=event_id, tmin=tmin - 0.15,
                        tmax=tmax + 0.15, proj=False, baseline=None,
                        preload=True, verbose=False, picks=picks)
    ix_accept_segm = [ix for ix, drop_reason in enumerate(epochs.drop_log)
                      if drop_reason == []]
    X, _, freqs_selected = preprocess_data_for_fspoc(
        epochs, [tmin, tmax], frange=self.frange, nfft=self.nfft,
        n_jobs=8, return_data=False)

    class CovarianceFrequency(object):
        freqs_selected = None

        def __init__(self, cov):
            self.cov = cov

    CovarianceFrequency.freqs_selected = freqs_selected
    X = np.array([CovarianceFrequency(cov) for cov in X])
    labels = self.get_labels(raw)[ix_accept_segm]
    metadata = pd.DataFrame(index=range(len(labels)))
    return X, labels, metadata
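# Why wrap each covariance in an object? Calling np.array() on a list of
# equal-shaped matrices would stack them into one numeric ndarray, whereas
# wrapping each one yields a 1-D object array that can be indexed per trial.
# A self-contained sketch (the Wrapper class here is illustrative, mirroring
# CovarianceFrequency above):
import numpy as np

class Wrapper(object):
    def __init__(self, cov):
        self.cov = cov

covs = [np.eye(3) for _ in range(5)]
X = np.array([Wrapper(c) for c in covs])
print(X.shape, X.dtype)  # (5,) object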