def extract(subject,
            session,
            recording,
            epoch,
            signal_type='BB',
            BEM='three_layer',
            debug=False,
            chunks=100,
            njobs=4):
    """Source-reconstruct one recording and write the result chunk-wise to HDF.

    Loads the epochs for the requested epoch type, builds an LCMV filter
    from the forward model and data covariance, then reconstructs either
    the broadband ('BB') or a TFR signal in chunks of ``chunks`` trials,
    saving each chunk to its own file. Chunks whose output file already
    exists are skipped, so an interrupted run can be resumed.
    """
    mne.set_log_level('WARNING')
    lcmv.logging.getLogger().setLevel(logging.INFO)
    set_n_threads(1)

    logging.info('Reading stimulus data')

    # Pick the epoch loader once instead of branching on the call itself.
    load_epoch = get_stim_epoch if epoch == 'stimulus' else get_response_epoch
    data_cov, epochs, epochs_filename = load_epoch(subject, session, recording)

    # NOTE(review): the 'TODO' patterns below look like unfinished
    # placeholders; '%' on a string with no conversion specifiers raises
    # TypeError at runtime — confirm the real glob patterns.
    raw_filename = glob('TODO' % (subject, session, recording))

    trans_filename = glob('TODO' % (subject, session, recording))[0]
    logging.info('Setting up source space and forward model')

    forward, bem, source = sr.get_leadfield(subject,
                                            raw_filename,
                                            epochs_filename,
                                            trans_filename,
                                            bem_sub_path='bem_ft')
    labels = sr.get_labels(subject)
    labels = sr.labels_exclude(labels,
                               exclude_filters=[
                                   'wang2015atlas.IPS4', 'wang2015atlas.IPS5',
                                   'wang2015atlas.SPL', 'JWDG_lat_Unknown'
                               ])
    labels = sr.labels_remove_overlap(
        labels,
        priority_filters=['wang', 'JWDG'],
    )

    def band_params(key, freqs, cycle_scale, bandwidth):
        # One multitaper parameter set per frequency band
        # (600 Hz sampling rate, decimation by 10).
        return {
            'foi': freqs,
            'cycles': freqs * cycle_scale,
            'time_bandwidth': bandwidth,
            'n_jobs': njobs,
            'est_val': freqs,
            'est_key': key,
            'sf': 600,
            'decim': 10,
        }

    tfr_params = {
        'HF': band_params('HF', np.arange(36, 162, 4), 0.25, 3),
        'LF': band_params('LF', np.arange(2, 36, 1), 0.4, 2),
    }

    events = epochs.events[:, 2]
    filters = lcmv.setup_filters(epochs.info, forward, data_cov, None, labels)
    set_n_threads(1)

    for start in range(0, len(events), chunks):
        filename = lcmvfilename(subject,
                                session,
                                signal_type,
                                recording,
                                chunk=start)
        # Resume support: skip chunks that were already written.
        if os.path.isfile(filename):
            continue
        stop = start + chunks
        if signal_type == 'BB':
            logging.info('Starting reconstruction of BB signal')
            M = lcmv.reconstruct_broadband(filters,
                                           epochs.info,
                                           epochs._data[start:stop],
                                           events[start:stop],
                                           epochs.times,
                                           njobs=1)
        else:
            logging.info('Starting reconstruction of TFR signal')
            M = lcmv.reconstruct_tfr(filters,
                                     epochs.info,
                                     epochs._data[start:stop],
                                     events[start:stop],
                                     epochs.times,
                                     est_args=tfr_params[signal_type],
                                     njobs=4)
        M.to_hdf(filename, 'epochs')
    set_n_threads(njobs)
# Esempio n. 2  (Example 2 — separator left over from the scraped source)
# 0
def extract_reconstruct_tfr_block(subject,
                                  session,
                                  block,
                                  epoch,
                                  signal_type='BB',
                                  BEM='three_layer',
                                  debug=False,
                                  chunks=50,
                                  njobs=4):
    """Reconstruct the TFR/broadband signal for one experimental block.

    Looks up the recording for (subject, session, block) in the pickled
    filename table, builds the LCMV filters, loads the block's epochs and
    reconstructs them in chunks of ``chunks`` trials, writing each chunk
    to its own HDF file. Exits the process if the block does not exist.
    """
    # Check that the requested block/recording exists for this subject.
    subject_int = int(subject[1:])
    fname = path + '/filenames_sub%i.pickle' % (subject_int)
    # Fix: use a context manager so the pickle file is always closed
    # (the original leaked the open file handle).
    with open(fname, 'rb') as f:
        data = pickle.load(f)
    df = pd.DataFrame.from_dict(data)
    # Fix: combine the row masks with '&' instead of chained boolean
    # indexing (df[a][b]), which pandas warns about and which relies on
    # implicit index alignment.
    session_mask = (df.subject == subject_int) & (df.session == session)
    blocks = np.array(df[session_mask].block)
    if block not in blocks:
        print('block does not exist')
        sys.exit(0)
    recording = df[session_mask
                   & (df.block == block)].trans_matrix.iloc[0]

    # Epoch file: index 0 = stimulus-locked, 1 = response-locked.
    if epoch == 'stimulus':
        fname = get_filenames_block(subject, session, block, recording)[0][0]
    else:
        fname = get_filenames_block(subject, session, block, recording)[1][0]

    # Fix: the original passed the undefined name 'session_int' here,
    # which raised NameError; the session identifier is 'session'.
    filters = extract_filter(subject,
                             session,
                             recording,
                             epoch,
                             signal_type=signal_type)
    fois_h = np.arange(36, 162, 4)
    fois_l = np.arange(2, 36, 1)
    # Multitaper parameters per frequency band
    # (600 Hz sampling rate, decimation by 10).
    tfr_params = {
        'HF': {
            'foi': fois_h,
            'cycles': fois_h * 0.25,
            'time_bandwidth': 2 + 1,
            'n_jobs': njobs,
            'est_val': fois_h,
            'est_key': 'HF',
            'sf': 600,
            'decim': 10
        },
        'LF': {
            'foi': fois_l,
            'cycles': fois_l * 0.4,
            'time_bandwidth': 1 + 1,
            'n_jobs': njobs,
            'est_val': fois_l,
            'est_key': 'LF',
            'sf': 600,
            'decim': 10
        }
    }

    print(fname)
    print('loading data')
    epochs = preprocessing.load_epochs([fname])
    print('concataneiting data')
    epochs = preprocessing.concatenate_epochs(epochs, None)
    print('Picking pick_channels')
    # Keep only MEG channels (names starting with 'M').
    epochs = epochs.pick_channels(
        [x for x in epochs.ch_names if x.startswith('M')])

    events = epochs.events[:, 2]

    for i in range(0, len(events), chunks):
        print('chunk:', i)
        filename = lcmvfilename_block(subject,
                                      session,
                                      block,
                                      signal_type,
                                      recording,
                                      epoch,
                                      chunk=i)
        # Resume support: skip chunks that were already written.
        if os.path.isfile(filename):
            continue
        if signal_type == 'BB':
            logging.info('Starting reconstruction of BB signal')
            M = lcmv.reconstruct_broadband(filters,
                                           epochs.info,
                                           epochs._data[i:i + chunks],
                                           events[i:i + chunks],
                                           epochs.times,
                                           njobs=1)
        else:
            logging.info('Starting reconstruction of TFR signal')
            M = lcmv.reconstruct_tfr(filters,
                                     epochs.info,
                                     epochs._data[i:i + chunks],
                                     events[i:i + chunks],
                                     epochs.times,
                                     est_args=tfr_params[signal_type],
                                     njobs=1)
        M.to_hdf(filename, 'epochs')
        # Free the chunk before reconstructing the next one.
        del M
    del epochs
    print('done')
def extract(
    subject,
    session,
    epoch_type="stimulus",
    signal_type="BB",
    only_glasser=False,
    BEM="three_layer",
    debug=False,
    chunks=100,
    njobs=4,
):
    """Source-reconstruct one session and write the result chunk-wise to HDF.

    Loads stimulus- or response-locked epochs, sets up the forward model
    and cortical labels (full atlas set, or only the HCPMMP1/Glasser
    annotation when ``only_glasser`` is True), then reconstructs the
    broadband ('BB') or TFR signal in chunks of ``chunks`` trials, each
    written to its own HDF file.

    Raises:
        RuntimeError: if ``epoch_type`` is neither 'stimulus' nor 'response'.
    """
    mne.set_log_level("WARNING")
    pymeglcmv.logging.getLogger().setLevel(logging.INFO)
    set_n_threads(1)

    logging.info("Reading stimulus data")
    if epoch_type == "stimulus":
        data_cov, epochs = get_stim_epoch(subject, session)
    elif epoch_type == "response":
        data_cov, epochs = get_response_epoch(subject, session)
    else:
        raise RuntimeError("Did not recognize epoch")

    logging.info("Setting up source space and forward model")

    forward, bem, source = get_leadfield(subject, session, BEM)

    if not only_glasser:
        labels = pymegsr.get_labels(
            subject="S%02i" % subject,
            filters=["*wang*.label", "*JWDG*.label"],
            annotations=["HCPMMP1"],
        )
        labels = pymegsr.labels_exclude(
            labels=labels,
            exclude_filters=[
                "wang2015atlas.IPS4",
                "wang2015atlas.IPS5",
                "wang2015atlas.SPL",
                "JWDG_lat_Unknown",
            ],
        )
        labels = pymegsr.labels_remove_overlap(
            labels=labels, priority_filters=["wang", "JWDG"])
    else:
        labels = pymegsr.get_labels(
            subject="S%02i" % subject,
            filters=["select_nothing"],
            annotations=["HCPMMP1"],
        )
    # Now chunk Reconstruction into blocks of ~100 trials to save Memory
    fois = np.arange(10, 150, 5)
    lfois = np.arange(1, 10, 1)
    tfr_params = {
        "F": {
            "foi": fois,
            "cycles": fois * 0.1,
            "time_bandwidth": 2,
            "n_jobs": 1,
            "est_val": fois,
            "est_key": "F",
        },
        "LF": {
            "foi": lfois,
            "cycles": lfois * 0.25,
            "time_bandwidth": 2,
            "n_jobs": 1,
            "est_val": lfois,
            "est_key": "LF",
        },
    }

    events = epochs.events[:, 2]
    # Fix: removed the unused local 'data = []' that was never read.
    filters = pymeglcmv.setup_filters(epochs.info, forward, data_cov, None,
                                      labels)

    set_n_threads(1)

    for i in range(0, len(events), chunks):
        filename = lcmvfilename(subject,
                                session,
                                signal_type,
                                epoch_type,
                                chunk=i,
                                only_glasser=only_glasser)
        logging.info(filename)
        # Existing-file skip is deliberately disabled here: every chunk is
        # recomputed and overwritten (note mode="w" below).
        # if os.path.isfile(filename):
        #    continue
        if signal_type == "BB":
            logging.info("Starting reconstruction of BB signal")
            M = pymeglcmv.reconstruct_broadband(
                filters,
                epochs.info,
                epochs._data[i:i + chunks],
                events[i:i + chunks],
                epochs.times,
                njobs=1,
            )
        else:
            logging.info("Starting reconstruction of TFR signal")
            M = pymeglcmv.reconstruct_tfr(
                filters,
                epochs.info,
                epochs._data[i:i + chunks],
                events[i:i + chunks],
                epochs.times,
                est_args=tfr_params[signal_type],
                njobs=4,
            )
        M.to_hdf(filename, "epochs", mode="w")
    set_n_threads(njobs)
# Esempio n. 4  (Example 4 — separator left over from the scraped source)
# 0
def extract_reconstruct_tfr(subject,
                            session,
                            recording,
                            epoch,
                            signal_type='BB',
                            BEM='three_layer',
                            debug=False,
                            chunks=50,
                            njobs=4):
    """Reconstruct TFR/broadband signals for all epoch files of a recording.

    Loads pre-computed LCMV filters from a pickle in ``path``/extra, then
    iterates over the epoch files matching the requested epoch type,
    reconstructing each one in chunks of ``chunks`` trials and writing
    every chunk to its own HDF file. Existing chunk files are skipped so
    interrupted runs can be resumed.
    """
    # Epoch file patterns: index 0 = stimulus-locked, 1 = response-locked.
    if epoch == 'stimulus':
        filenames = glob(get_filenames(subject, session, recording)[0])
    else:
        filenames = glob(get_filenames(subject, session, recording)[1])

    subject_int = int(subject[1:])
    fname = 'filter_sub%i_SESS%i_recording%i_epoch%s.pickle' % (
        subject_int, session, recording, epoch)
    filter_path = join(path + '/extra', fname)
    # Fix: load the pickled filters with a context manager so the file is
    # closed even if unpickling raises (was a manual open/close pair).
    with open(filter_path, 'rb') as f:
        filters = pickle.load(f)
    fois_h = np.arange(36, 162, 4)
    fois_l = np.arange(2, 36, 1)
    # Multitaper parameters per frequency band
    # (600 Hz sampling rate, decimation by 10).
    tfr_params = {
        'HF': {
            'foi': fois_h,
            'cycles': fois_h * 0.25,
            'time_bandwidth': 2 + 1,
            'n_jobs': njobs,
            'est_val': fois_h,
            'est_key': 'HF',
            'sf': 600,
            'decim': 10
        },
        'LF': {
            'foi': fois_l,
            'cycles': fois_l * 0.4,
            'time_bandwidth': 1 + 1,
            'n_jobs': njobs,
            'est_val': fois_l,
            'est_key': 'LF',
            'sf': 600,
            'decim': 10
        }
    }

    print('filters done')
    for ifname, fname in enumerate(filenames):
        print(fname)
        epochs = preprocessing.load_epochs([fname])
        epochs = preprocessing.concatenate_epochs(epochs, None)
        # Keep only MEG channels (names starting with 'M').
        epochs = epochs.pick_channels(
            [x for x in epochs.ch_names if x.startswith('M')])
        events = epochs.events[:, 2]

        for i in range(0, len(events), chunks):
            print('chunk:', i)
            # The file index is appended to the epoch name so chunks from
            # different epoch files do not overwrite each other.
            filename = lcmvfilename(subject,
                                    session,
                                    signal_type,
                                    recording,
                                    epoch + str(ifname),
                                    chunk=i)
            # Resume support: skip chunks that were already written.
            if os.path.isfile(filename):
                continue
            if signal_type == 'BB':
                logging.info('Starting reconstruction of BB signal')
                M = lcmv.reconstruct_broadband(filters,
                                               epochs.info,
                                               epochs._data[i:i + chunks],
                                               events[i:i + chunks],
                                               epochs.times,
                                               njobs=1)
            else:
                logging.info('Starting reconstruction of TFR signal')
                M = lcmv.reconstruct_tfr(filters,
                                         epochs.info,
                                         epochs._data[i:i + chunks],
                                         events[i:i + chunks],
                                         epochs.times,
                                         est_args=tfr_params[signal_type],
                                         njobs=1)
            M.to_hdf(filename, 'epochs')
            # Free the chunk before reconstructing the next one.
            del M
        del epochs
        print('done')
# Esempio n. 5  (Example 5 — separator left over from the scraped source)
# 0
def extract(
    recording_number,
    epoch,
    signal_type="BB",
    BEM="three_layer",
    chunks=100,
    njobs=4,
    glasser_only=True,
):
    """Source-reconstruct one recording and write the result chunk-wise to HDF.

    Resolves the recording from ``ps.recordings``, builds the forward
    model and cortical labels (only the wang2015 atlas when
    ``glasser_only`` is True, otherwise the full filtered label set), then
    reconstructs the broadband ('BB') or TFR signal in chunks of
    ``chunks`` trials. Chunks whose output file already exists are
    skipped, so an interrupted run can be resumed.
    """
    recording = ps.recordings[recording_number]
    mne.set_log_level("ERROR")
    lcmv.logging.getLogger().setLevel(logging.INFO)
    set_n_threads(1)

    logging.info("Reading stimulus data")
    data_cov, epochs = get_epoch(epoch, recording)

    # The FreeSurfer subject id is reused below for leadfield and labels.
    subject_id = "SQC_S%02i" % recording.subject
    raw_filename = raw_path / recording.filename
    trans_filename = trans_path / (
        "SQC_S%02i-SESS%i_B%i_trans.fif" %
        (recording.subject, recording.session, recording.block[1]))
    epoch_filename = ps.filenames(recording.subject, epoch, recording.session,
                                  recording.block[1])[0]

    logging.info("Setting up source space and forward model")

    forward, bem, source = sr.get_leadfield(
        subject_id,
        str(raw_filename),
        str(epoch_filename),
        str(trans_filename),
        bem_sub_path="bem",
        sdir="/home/nwilming/seqconf/fsdir/",
    )
    if glasser_only:
        labels = sr.get_labels(
            subject_id,
            filters=["*wang2015atlas*"],
            sdir="/home/nwilming/seqconf/fsdir/",
        )
    else:
        labels = sr.get_labels(subject_id,
                               sdir="/home/nwilming/seqconf/fsdir/")
        labels = sr.labels_exclude(
            labels,
            exclude_filters=[
                "wang2015atlas.IPS4",
                "wang2015atlas.IPS5",
                "wang2015atlas.SPL",
                "JWDG_lat_Unknown",
            ],
        )
        labels = sr.labels_remove_overlap(labels,
                                          priority_filters=["wang", "JWDG"])

    def _band_cfg(key, freqs, cycle_scale, bandwidth):
        # One multitaper parameter set per frequency band
        # (600 Hz sampling rate, decimation by 10).
        return {
            "foi": freqs,
            "cycles": freqs * cycle_scale,
            "time_bandwidth": bandwidth,
            "n_jobs": njobs,
            "est_val": freqs,
            "est_key": key,
            "sf": 600,
            "decim": 10,
        }

    tfr_params = {
        "HF": _band_cfg("HF", np.arange(36, 162, 4), 0.25, 3),
        "LF": _band_cfg("LF", np.arange(2, 36, 1), 0.4, 2),
    }

    events = epochs.events[:, 2]
    filters = lcmv.setup_filters(epochs.info, forward, data_cov, None, labels)
    set_n_threads(1)

    for start in range(0, len(events), chunks):
        filename = lcmvfilename(
            recording,
            signal_type,
            epoch,
            chunk=start,
        )
        # Resume support: skip chunks that were already written.
        if os.path.isfile(filename):
            continue
        stop = start + chunks
        if signal_type == "BB":
            logging.info("Starting reconstruction of BB signal")
            M = lcmv.reconstruct_broadband(
                filters,
                epochs.info,
                epochs._data[start:stop],
                events[start:stop],
                epochs.times,
                njobs=1,
            )
        else:
            logging.info("Starting reconstruction of TFR signal")
            M = lcmv.reconstruct_tfr(
                filters,
                epochs.info,
                epochs._data[start:stop],
                events[start:stop],
                epochs.times,
                est_args=tfr_params[signal_type],
                njobs=4,
            )
        M.to_hdf(str(filename), "epochs")
    set_n_threads(njobs)