Example #1
import locale

import mne
import numpy as np

# Assumed import: these examples rely on a project-level preprocessing module
# (e.g. pymeg.preprocessing) for get_meta() and related helpers.
from pymeg import preprocessing


def get_localizer_epochs(filename, reject=dict(mag=4e-12)):
    locale.setlocale(locale.LC_ALL, "en_US")
    raw = mne.io.read_raw_ctf(filename, system_clock='ignore')
    sf = float(raw.info['sfreq'])
    mapping = {
        50: ('con_change', 0),
        64: ('stim_onset', 0),
        160: ('start', 0),
        161: ('end', 0)
    }
    meta, timing = preprocessing.get_meta(raw, mapping, {}, 160, 161)
    if len(meta) == 0:
        return None
    # Pad the crop window by 5 s on each side of the first/last trigger.
    tmin = (timing.min().min() / sf) - 5
    tmax = (timing.max().max() / sf) + 5
    raw = raw.crop(tmin=max(0, tmin), tmax=min(tmax, raw.times[-1]))
    raw.load_data()
    raw.notch_filter(np.arange(50, 251, 50), n_jobs=4)
    meta, timing = preprocessing.get_meta(raw, mapping, {}, 160, 161)

    # Build an MNE events array: (sample, previous value, event id).
    n = len(timing)
    events = np.vstack([timing.stim_onset_time, [0] * n, [1] * n]).T
    e = mne.Epochs(
        raw,
        events=events.astype(int),
        tmin=-1.25,
        tmax=1.75,
        reject=reject,
    )
    del raw
    return e
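
A minimal usage sketch, assuming a hypothetical CTF dataset path; pass a
different reject dict to loosen the amplitude threshold:

epochs = get_localizer_epochs('/data/S01_localizer.ds',  # hypothetical path
                              reject=dict(mag=5e-12))
if epochs is not None:  # None is returned when no triggers were found
    epochs.load_data()
    print(epochs)
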
def get_preprocessed_block(raw, block):
    # block.start / block.end are given in minutes; convert to seconds.
    start, end = block.start * 60, block.end * 60
    r = raw.copy().crop(start, end)
    print("Processing", r)
    # get_meta, mapping and preprocess_block are module-level helpers here.
    block_meta, block_timing = get_meta(r, mapping, {}, 41, 41)
    r, ants, artdef = preprocess_block(r)
    return r, ants, artdef
Example #3
def get_preprocessed_block(raw, block):
    # block.start / block.end are given in minutes; convert to seconds.
    start, end = block.start * 60, block.end * 60
    r = raw.copy().crop(start, end)
    print("Processing", r)
    block_meta, block_timing = get_meta(r, mapping, {}, 41, 41)

    # For subject 10 there is a spurious trigger in coherent_motion;
    # filter it out by keeping only the first event.
    try:
        if len(block_meta.loc[10, 'coherence_on']) == 2:
            block_meta.loc[10, 'coherence_on'] = 1
            block_timing.loc[10, 'coherence_on_time'] = \
                block_timing.loc[10, 'coherence_on_time'][0]
    except TypeError:
        pass

    r, ants, artdef = preprocess_block(r)
    return r, ants, artdef, block_meta, block_timing
def get_blocks(raw):
    # meta/timing are computed but unused here; block boundaries come
    # straight from the markers.
    meta, timing = get_meta(raw, mapping, {}, 41, 41)
    # meta.loc[:, 'hash'] = np.arange(len(meta))
    # meta.loc[:, 'timing'] = np.arange(len(meta))
    return blocks_from_marker(raw)
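
A sketch of how these two helpers might be chained, assuming get_blocks
returns block objects with start/end attributes in minutes (as the *60
conversion above implies); the path is hypothetical and the unpacking
follows the five-value variant defined above:

raw = mne.io.read_raw_ctf('/data/S01_task.ds')  # hypothetical path
for block in get_blocks(raw):
    r, ants, artdef, block_meta, block_timing = \
        get_preprocessed_block(raw, block)
    print(block_meta.head())
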
Example #5
import glob
import os
from os.path import join

import mne
import numpy as np

# Assumed imports: the example calls both `preprocessing.*` and
# `meg.preprocessing.*`, presumably pointing at the same project module.
import meg
from meg import preprocessing


def preprocess(subject, session):
    columns_meta = [
        u"baseline_start",
        u"decision_start",
        u"dot_onset",
        u"feedback",
        u"noise",
        u"response",
        u"rest_delay",
        u"trial_end",
        u"trial_num",
        u"trial_start",
        u"wait_fix",
        u"session_number",
        u"block_start",
    ]

    columns_timing = [
        "baseline_start_time",
        "decision_start_time",
        "dot_onset_time",
        "feedback_time",
        "noise_time",
        "response_time",
        "rest_delay_time",
        "trial_end_time",
        "trial_start_time",
        "wait_fix_time",
    ]

    path = "/mnt/homes/home024/gortega/megdata/"
    path_cluster = "/home/gortega/preprocessed_megdata/sensor_space"
    path_megdata = "/home/gortega/megdata/"

    for file_idx, filename in enumerate(
            glob.glob(
                os.path.join(path_megdata,
                             "*S%i-%i_Att*" % (subject, session)))):
        date = filename[-14:-6]
        raw = mne.io.read_raw_ctf(filename)
        #raw._first_samps = np.cumsum(raw._raw_lengths) - raw._raw_lengths[0]
        #raw._last_samps = np.cumsum(raw._last_samps)

        # Pins and trigger mapping.
        other_pins = {100: "session_number", 101: "block_start"}
        trial_pins = {150: "trial_num"}
        mapping = {
            ("noise", 0): 111,
            ("noise", 1): 112,
            ("noise", 2): 113,
            ("start_button", 0): 89,
            ("start_button", 1): 91,
            ("trial_start", 0): 150,
            ("trial_end", 0): 151,
            ("wait_fix", 0): 30,
            ("baseline_start", 0): 40,
            ("dot_onset", 0): 50,
            ("decision_start", 0): 60,
            ("response", -1): 61,
            ("response", 1): 62,
            ("no_decisions", 0): 68,
            ("feedback", 0): 70,
            ("rest_delay", 0): 80,
        }
        # Invert the dict: trigger code -> (event name, value).
        mapping = dict((v, k) for k, v in mapping.items())

        # Get metadata and event timing from the raw data.
        meta, timing = preprocessing.get_meta(raw, mapping, trial_pins, 150,
                                              151, other_pins)
        index = meta.block_start
        block_times, block_idx = get_blocks(meta, timing)

        for bnum in block_times.keys():
            print(
                "******************************** SESSION",
                session,
                "BLOCK",
                bnum,
                "******************************** ",
            )

            mb2 = meta.loc[block_idx[bnum][0]:block_idx[bnum][1]]
            tb2 = timing.loc[block_idx[bnum][0]:block_idx[bnum][1]]

            tb2 = tb2.dropna(subset=["dot_onset_time"])
            mb2 = mb2.dropna(subset=["dot_onset"])
            # Keep only trials with exactly 10 dot onsets and record the
            # time/value of the first dot.
            index = []
            for idx in tb2.index:
                try:
                    if len(tb2.loc[idx, "dot_onset_time"]) == 10:
                        index.append(idx)
                        tb2.loc[idx, "first_dot_time"] = \
                            tb2.loc[idx, "dot_onset_time"][0]
                        mb2.loc[idx, "first_dot"] = \
                            mb2.loc[idx, "dot_onset"][0]
                except TypeError:
                    pass
            tb2 = tb2.loc[index]
            mb2 = mb2.loc[index]

            r = raw.copy()  # work on a copy so the original raw is untouched
            r.crop(
                tmin=block_times[bnum][0] / 1200,  # samples -> s (1200 Hz)
                tmax=block_times[bnum][1] / 1200,
            )  # crop to the current block
            r = interpolate_bad_channels(subject, session, r)
            mb, tb = meg.preprocessing.get_meta(
                r, mapping, trial_pins, 150, 151,
                other_pins)  # get new metadata for each block

            # Sometimes spurious columns appear; keep only the expected ones.
            mb = eliminate_spurious_columns(mb, columns_meta)
            tb = eliminate_spurious_columns(tb, columns_timing)
            tb = tb.dropna()
            mb = mb.dropna()
            # Preprocess the block, detecting artifacts (blinks etc.).
            r, ants, artdef = meg.preprocessing.preprocess_block(
                r, blinks=True)

            print("Notch filtering")
            midx = np.where([x.startswith("M") for x in r.ch_names])[0]
            r.load_data()
            r.notch_filter(np.arange(50, 251, 50), picks=midx)

            bad_channels = r.info["bads"]
            if len(r.info["bads"]) > 0:
                r.load_data()
                r.interpolate_bads(reset_bads=False)
                r.info["bads"] = []

            trial_repeat = []
            mb.loc[:, "hash"] = hash(subject, mb.session_number, bnum,
                                     mb.trial_num)

            # Create a column for the onset of the first dot.
            tb.loc[:, 'first_dot_time'] = np.array(
                [x[0] for x in tb.dot_onset_time])
            stimmeta, stimlock = preprocessing.get_epoch(
                r,
                mb.dropna(),
                tb.dropna(),
                event="first_dot_time",
                epoch_time=(-1, 3.5),
                epoch_label="hash",
                reject_time=(0, 2),
            )

            if len(stimmeta) > 0:
                stim_filename = join(
                    path_cluster,
                    "down_sample_stim_meta_sub%i_sess%i_block%i_offset%i" %
                    (subject, session, bnum, file_idx),
                )
                stimlock.resample(600, npad="auto")
                stimmeta.to_hdf(stim_filename + ".hdf", "meta")
                stimlock.save(stim_filename + "-epo.fif.gz")

            rlmeta, resplock = preprocessing.get_epoch(
                r,
                mb.dropna(),
                tb.dropna(),
                event="response_time",
                epoch_time=(-2.5, 0.5),
                epoch_label="hash",
                reject_time=(-1, 0.4),
            )
            if len(rlmeta) > 0:
                resp_filename = join(
                    path_cluster,
                    "down_sample_resp_meta_sub%i_sess%i_block%i_offset%i" %
                    (subject, session, bnum, file_idx),
                )
                resplock.resample(600, npad="auto")
                rlmeta.to_hdf(resp_filename + ".hdf", "meta")
                resplock.save(resp_filename + "-epo.fif.gz")

    print("done")
Example #6
        other_pins = {100: 'session_number', 101: 'block_start'}
        trial_pins = {150: 'trial_num'}
        mapping = {
            ('noise', 0): 111,
            ('noise', 1): 112,
            ('noise', 2): 113,
            ('start_button', 0): 89,
            ('start_button', 1): 91,
            ('trial_start', 0): 150,
            ('trial_end', 0): 151,
            ('wait_fix', 0): 30,
            ('baseline_start', 0): 40,
            ('dot_onset', 0): 50,
            ('decision_start', 0): 60,
            ('response', -1): 61,
            ('response', 1): 62,
            ('no_decisions', 0): 68,
            ('feedback', 0): 70,
            ('rest_delay', 0): 80,
        }

        # Invert the dict: trigger code -> (event name, value).
        mapping = dict((v, k) for k, v in mapping.items())
        # key_map gives a number to each trial.

        # Get metadata and event timing from the raw data.
        meta, timing = preprocessing.get_meta(raw, mapping, trial_pins, 150,
                                              151, other_pins)
        index = meta.block_start

        # Split the raw data into blocks: trigger 100 marks a block start.
        events = mne.find_events(raw, 'UPPT001', shortest_event=1)
        index_b_start = np.where(events[:, 2] == 100)
        time_block = []
        time_block2 = []
        for i in index_b_start[0]:
            time_block.append(events[i][0])
        # The number of block-start triggers should match the metadata.
        if len(meta.groupby('block_start')) != len(time_block):
            # Special case: fix the trigger problem for subject 17.
            if filename == path_megdata + 'S17-7_Attractor_20161130_01.ds':