def apply_rois(fn_stc, tmin, tmax, thr, min_subject='fsaverage'):
    #fn_avg = subjects_dir+'/fsaverage/%s_ROIs/%s-lh.stc' %(method,evt_st)
    stc_avg = mne.read_source_estimate(fn_stc)
    stc_avg = stc_avg.crop(tmin, tmax)
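    # Zero out every source whose summed power over the cropped window falls
    # below the thr-th percentile, so that stc_to_label() below only clusters
    # the strongest activations.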
    src_pow = np.sum(stc_avg.data ** 2, axis=1)
    stc_avg.data[src_pow < np.percentile(src_pow, thr)] = 0.
    fn_src = subjects_dir+'/%s/bem/fsaverage-ico-5-src.fif' %min_subject
    src_inv = mne.read_source_spaces(fn_src)
    func_labels_lh, func_labels_rh = mne.stc_to_label(
                    stc_avg, src=src_inv, smooth=True,
                    subjects_dir=subjects_dir,
                    connected=True)
    # Left hemisphere definition
    i = 0
    labels_path = fn_stc[:fn_stc.rfind('-')] + '/ini'
    reset_directory(labels_path)
    while i < len(func_labels_lh):
        func_label = func_labels_lh[i]
        func_label.save(labels_path + '/ROI_%d' %(i))
        i = i + 1
    # right hemisphere definition
    j = 0
    while j < len(func_labels_rh):
        func_label = func_labels_rh[j]
        func_label.save(labels_path + '/ROI_%d' %(j))
        j = j + 1
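# A minimal usage sketch for apply_rois() above (assumed call, not part of the
# original pipeline): the STC file name is a hypothetical placeholder; only the
# '-lh.stc'/'-rh.stc' pair written by mne.SourceEstimate.save() and the
# module-level os/mne imports used throughout this file are assumed.
fn_avg = os.environ['SUBJECTS_DIR'] + '/fsaverage/dSPM_ROIs/LLst_avg-lh.stc'  # hypothetical file
apply_rois(fn_avg, tmin=0.0, tmax=0.4, thr=99)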
def group_rois(am_sub=0, min_subject='fsaverage'):
    """
    Choose the common ROIs that appear in at least 'am_sub' subjects.
    The subjects directory is taken from the SUBJECTS_DIR environment variable.
    ----------
    am_sub: the minimum number of subjects in which an ROI must appear.
    min_subject: the subject used as the common brain space.
    """
    import shutil
    subjects_dir = os.environ['SUBJECTS_DIR']
    min_path = subjects_dir + '/%s' %min_subject 
    com_path = min_path + '/Group_ROIs/common/'
    mer_path = min_path + '/Group_ROIs/merged/'
    reset_directory(com_path)
    list_dirs = os.walk(mer_path)
    label_list = ['']
    for root, dirs, files in list_dirs:
        for f in files:
            label_fname = os.path.join(root, f)
            label_list.append(label_fname)
    label_list = label_list[1:]
    for fn_label in label_list:
        fn_name = os.path.basename(fn_label)
        subjects = (fn_name.split('_')[0]).split(',')
        if len(subjects) >= am_sub:
            shutil.copy(fn_label, com_path)
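# Usage sketch for group_rois() above (assumed call): keep only the ROIs that
# appear in at least 8 subjects; SUBJECTS_DIR and the Group_ROIs/merged/ folder
# layout created by the merging step are assumed to exist already.
group_rois(am_sub=8, min_subject='fsaverage')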
def apply_merge(labels_path, evt_list):
    '''Every condition has two kinds of events; merge the two kinds of standard
       ROIs to form the final ROIs for the causality analysis.

       Parameters
       ----------
       labels_path: string.
            The root path holding the per-event label folders.
       evt_list: list of pairs.
            Pairs of event names (subfolders under 'labels_path'), e.g.
            [('LLst', 'LLrt')]; see the usage sketch after this function.
    '''
    import glob, shutil
    for evt in evt_list:
        mer_path = labels_path + '%s/merged/' %evt[0]
        reset_directory(mer_path)
        source0_path = labels_path + '%s/standard/' %evt[0]
        source1_path = labels_path + '%s/standard/' %evt[1]
        source = glob.glob(os.path.join(source0_path, '*.*'))
        source = source + glob.glob(os.path.join(source1_path, '*.*'))
        for filename in source:
            shutil.copy(filename, mer_path)
             
        reducer = True
        while reducer:
            list_dirs = os.walk(mer_path)
            label_list = ['']
            for root, dirs, files in list_dirs:
                for f in files:
                    label_fname = os.path.join(root, f)
                    label_list.append(label_fname)
            label_list = label_list[1:]
            len_class = _cluster_merge(mer_path, label_list)
            if len_class == len(label_list):
                reducer = False  
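# Usage sketch for apply_merge() above (assumed call): each entry of evt_list
# is indexed as evt[0]/evt[1] above, i.e. a pair of event names whose
# 'standard' ROIs are pooled under the first event's 'merged/' folder and then
# clustered. The path below is a hypothetical placeholder.
labels_path = os.environ['SUBJECTS_DIR'] + '/fsaverage/dSPM_ROIs/'  # hypothetical
apply_merge(labels_path, evt_list=[('LLst', 'LLrt'), ('RRst', 'RRrt')])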
def apply_merge(labels_path, evt_list):
    import glob, shutil

    for evt in evt_list:
        mer_path = labels_path + "%s/merged/" % evt[0]
        reset_directory(mer_path)
        source0_path = labels_path + "%s/standard/" % evt[0]
        source1_path = labels_path + "%s/standard/" % evt[1]
        source = glob.glob(os.path.join(source0_path, "*.*"))
        source = source + glob.glob(os.path.join(source1_path, "*.*"))
        for filename in source:
            shutil.copy(filename, mer_path)

        reducer = True
        while reducer:
            list_dirs = os.walk(mer_path)
            label_list = [""]
            for root, dirs, files in list_dirs:
                for f in files:
                    label_fname = os.path.join(root, f)
                    label_list.append(label_fname)
            label_list = label_list[1:]
            len_class = _cluster1_rois(mer_path, label_list)
            if len_class == len(label_list):
                reducer = False
def apply_rois(fn_stc, event, tmin=0.0, tmax=0.3, min_subject='fsaverage', thr=99):
    """
    Compute regions of interest (ROI) based on events
    ----------
    fn_stc : string or list
        evoked and morphed STC file(s).
    event: string
        event of the related STC.
    tmin, tmax: float
        segment for ROIs definition.
    min_subject: string
        the subject as the common brain space.
    thr: float or int
        threshold of STC used for ROI identification.
    """
    fnlist = get_files_from_list(fn_stc)
    # loop across all filenames
    for ifn_stc in fnlist:
        subjects_dir = os.environ['SUBJECTS_DIR']
        # extract the subject information from the file name
        stc_path = os.path.split(ifn_stc)[0]
        #name = os.path.basename(fn_stc)
        #tri = name.split('_')[1].split('-')[0]
        min_path = subjects_dir + '/%s' % min_subject
        fn_src = min_path + '/bem/fsaverage-ico-4-src.fif'
        # Make sure the target path exists
        labels_path = stc_path + '/%s/' %event
        reset_directory(labels_path)
        # Read the MNI source space
        src_inv = mne.read_source_spaces(fn_src)
        stc = mne.read_source_estimate(ifn_stc, subject=min_subject)
        bc_stc = stc.copy().crop(tmin, tmax)
        src_pow = np.sum(bc_stc.data ** 2, axis=1)
        bc_stc.data[src_pow < np.percentile(src_pow, thr)] = 0.
        #stc_data = stc_morph.data
        #import pdb
        #pdb.set_trace()
        #zscore stc for ROIs estimation
        #d_mu = stc_data.mean(axis=1, keepdims=True)
        #d_std = stc_data.std(axis=1, ddof=1, keepdims=True)
        #z_data = (stc_data - d_mu)/d_std
        func_labels_lh, func_labels_rh = mne.stc_to_label(
            bc_stc, src=src_inv, smooth=True,
            subjects_dir=subjects_dir,
            connected=True)
        # Left hemisphere definition
        i = 0
        while i < len(func_labels_lh):
            func_label = func_labels_lh[i]
            func_label.save(labels_path + '%s_%s' % (event, str(i)))
            i = i + 1
        # right hemisphere definition
        j = 0
        while j < len(func_labels_rh):
            func_label = func_labels_rh[j]
            func_label.save(labels_path + '%s_%s' % (event, str(j)))
            j = j + 1    
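# Usage sketch for the event-based apply_rois() above (assumed call; the glob
# pattern is a hypothetical placeholder, and the module-level glob/os imports
# used elsewhere in this file are assumed).
stc_list = glob.glob(os.environ['SUBJECTS_DIR'] +
                     '/fsaverage/dSPM_ROIs/*/*,evtW_LLst_bc-lh.stc')  # hypothetical pattern
apply_rois(stc_list, event='LLst', tmin=0.0, tmax=0.3, thr=99)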
def apply_rois(fn_stc_list, event, min_subject='fsaverage', thr=0.85):
    """
    Compute regions of interest (ROI) based on events
    ----------
    fn_stc_list : string or list
        evoked and morphed STC file(s).
    event: string
        event of the related STC.
    min_subject: string
        the subject as the common brain space.
    thr: float
        fraction of each hemisphere's maximum STC amplitude below which
        sources are zeroed.
    """
    #from scipy.signal import detrend
    #from scipy.stats.mstats import zscore 
    fnlist = get_files_from_list(fn_stc_list)
    # loop across all filenames
    for fn_stc in fnlist:
        # extract the subject information from the file name
        stc_path = os.path.split(fn_stc)[0]
        min_path = subjects_dir + '/%s' % min_subject
        fn_src = min_path + '/bem/fsaverage-ico-5-src.fif'
        # Make sure the target path exists
        labels_path = stc_path + '/%s/ini' %event
        reset_directory(labels_path)
      
        # Read the MNI source space
        stc = mne.read_source_estimate(fn_stc)
        src_inv = mne.read_source_spaces(fn_src)
        stc.lh_data[stc.lh_data < thr * np.max(stc.lh_data)] = 0
        stc.rh_data[stc.rh_data < thr * np.max(stc.rh_data)] = 0
        #data_lh=np.squeeze(stc.lh_data)
        #index_lh = np.argwhere(data_lh)
        #stc.lh_data[data_lh<np.percentile(data_lh[index_lh], thr)] = 0
        #data_rh=np.squeeze(stc.rh_data)
        #index_rh = np.argwhere(data_rh)
        #stc.rh_data[data_rh<np.percentile(data_rh[index_rh], thr)] = 0
        #non_index = np.argwhere(data)
        #stc.data[data<np.percentile(data[non_index], thr)] = 0
        func_labels_lh, func_labels_rh = mne.stc_to_label(
                    stc, src=src_inv, smooth=True,
                    subjects_dir=subjects_dir,
                    connected=True)
        # Left hemisphere definition
        i = 0
        while i < len(func_labels_lh):
            func_label = func_labels_lh[i]
            func_label.save(labels_path + '/%s_%d' %(event, i))
            i = i + 1
        # right hemisphere definition
        j = 0
        while j < len(func_labels_rh):
            func_label = func_labels_rh[j]
            func_label.save(labels_path + '/%s_%d' %(event, j))
            j = j + 1
def apply_rois(fn_stcs, event='LLst', tmin=0.0, tmax=0.6, tstep=0.05, window=0.2, 
               fmin=4, fmax=8, thr=99, min_subject='fsaverage'): 
    """
    Compute regions of interest (ROI) based on events
    ----------
    fn_stcs : the file name(s) of the morphed stc.
    event: event name related with the stc.
    thr: the percentile of the stc's strength.
    min_subject: the subject for the common brain space.
    
    """
    from mne import read_source_spaces 
    fnlist = get_files_from_list(fn_stcs)
    # loop across all filenames
    for fn_stc in fnlist:
        name = os.path.basename(fn_stc)
        subject = name.split('_')[0]
        subjects_dir = os.environ['SUBJECTS_DIR']
        min_dir = subjects_dir + '/%s' %min_subject
        labels_path = min_dir + '/DICS_ROIs/%s/%s/' %(subject, event)
        reset_directory(labels_path)
        src = min_dir + '/bem/%s-ico-4-src.fif' %min_subject
        src_inv = read_source_spaces(src)
        stc = mne.read_source_estimate(fn_stc, subject=min_subject) 
        stc = stc.crop(tmin, tmax)
        src_pow = np.sum(stc.data ** 2, axis=1)
        stc.data[src_pow < np.percentile(src_pow, thr)] = 0.
        tbeg = tmin
        while tbeg < tmax:
            tend = tbeg + window
            win_stc = stc.copy().crop(tbeg, tend)
            stc_data = win_stc.data 
            src_pow = np.sum(stc_data ** 2, axis=1)
            win_stc.data[src_pow < np.percentile(src_pow, thr)] = 0.
            func_labels_lh, func_labels_rh = mne.stc_to_label(
                win_stc, src=src_inv, smooth=True,
                subjects_dir=subjects_dir,
                connected=True)
            # Left hemisphere definition
            i = 0
            while i < len(func_labels_lh):
                func_label = func_labels_lh[i]
                func_label.save(labels_path + '%s_%s_win%.2f_%.2f' % (event, str(i), tbeg, tend))
                i = i + 1
            # right hemisphere definition
            j = 0
            while j < len(func_labels_rh):
                func_label = func_labels_rh[j]
                func_label.save(labels_path + '%s_%s_win%.2f_%.2f' % (event, str(j), tbeg, tend))
                j = j + 1
            tbeg = tbeg + tstep
def apply_rois(fn_stcs, evt='LLst', tmin=0.05, tmax=0.25, thr=99, min_subject='fsaverage'): 
    """
    Compute regions of interest (ROI) based on events
    ----------
    fn_stcs : the file name(s) of the morphed stc.
    evt: event name related with the stc.
    thr: the percentile of the stc's strength.
    min_subject: the subject for the common brain space.
    
    """
    from mne import read_source_spaces 
    fnlist = get_files_from_list(fn_stcs)
    # loop across all filenames
    for fn_stc in fnlist:
        name = os.path.basename(fn_stc)
        subject = name.split('_')[0]
        subjects_dir = os.environ['SUBJECTS_DIR']
        min_dir = subjects_dir + '/%s' %min_subject
        labels_path = min_dir + '/DICS_ROIs/%s/%s/' %(subject, evt)
        reset_directory(labels_path)
        src = min_dir + '/bem/%s-ico-4-src.fif' %min_subject
        src_inv = read_source_spaces(src)
        stc = mne.read_source_estimate(fn_stc, subject=min_subject) 
        stc = stc.crop(tmin, tmax)
        src_pow = np.sum(stc.data ** 2, axis=1)
        stc.data[src_pow < np.percentile(src_pow, thr)] = 0.
        #stc_data = stc_morph.data
        #import pdb
        #pdb.set_trace()
        #zscore stc for ROIs estimation
        #d_mu = stc_data.mean(axis=1, keepdims=True)
        #d_std = stc_data.std(axis=1, ddof=1, keepdims=True)
        #z_data = (stc_data - d_mu)/d_std
        func_labels_lh, func_labels_rh = mne.stc_to_label(
            stc, src=src_inv, smooth=True,
            subjects_dir=subjects_dir,
            connected=True)
        # Left hemisphere definition
        i = 0
        while i < len(func_labels_lh):
            func_label = func_labels_lh[i]
            func_label.save(labels_path + '%s_%s' % (evt, str(i)))
            i = i + 1
        # right hemisphere definition
        j = 0
        while j < len(func_labels_rh):
            func_label = func_labels_rh[j]
            func_label.save(labels_path + '%s_%s' % (evt, str(j)))
            j = j + 1  
def merge_rois(labels_path_list, event='LLst'):
    """
    Merge ROIs so that overlapping labels are combined into one.
    The ROIs of the given event are copied into the folder 'ROIs' under each
    'labels_path' entry and merged there until the label set is stable.
    ----------
    labels_path_list: the path(s) of the per-subject ROI folders.
    event: name of the event subfolder whose ROIs are merged.
    """
    path_list = get_files_from_list(labels_path_list)
    # loop across all filenames
    for labels_path in path_list:
        import glob, shutil
     
        mer_path = labels_path + '/ROIs/'
        reset_directory(mer_path)
        source_path = labels_path + '/%s' %event
        for filename in glob.glob(os.path.join(source_path, '*.*')):
            shutil.copy(filename, mer_path)

        # Merge the individual subject's ROIs
        reducer = True
        count = 1
        while reducer:
            list_dirs = os.walk(mer_path)
            label_list = ['']
            for root, dirs, files in list_dirs:
                for f in files:
                    label_fname = os.path.join(root, f)
                    label_list.append(label_fname)
            label_list = label_list[1:]
            len_class = _cluster_rois(mer_path, label_list, count)
            if len_class == len(label_list):
                reducer = False  
            count = count + 1
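# Usage sketch for merge_rois() above (assumed call): copy every ROI found
# under '<labels_path>/<event>' into '<labels_path>/ROIs/' and merge until
# _cluster_rois() leaves the label count unchanged. The glob pattern is a
# hypothetical placeholder.
labels_dirs = glob.glob(os.environ['SUBJECTS_DIR'] + '/fsaverage/dSPM_ROIs/*[0-9]')  # hypothetical
merge_rois(labels_dirs, event='LLst')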
#subject_id = 'fsaverage'
#list_dirs = os.walk(labels_path) 
#lh_mvs = []
#rh_mvs = []
#for root, dirs, files in list_dirs: 
#    for f in files:
#        label_fname = os.path.join(root, f) 
#        label = mne.read_label(label_fname)
#        pca = stc_avg.extract_label_time_course(label, src_inv, mode='pca_flip')
def apply_stand(fn_stc, radius=5.0, min_subject='fsaverage', tmin=0.1, tmax=0.5):

    """
    Standardize the size of the selected ROIs.
    Parameters
    ----------
    fn_stc: string or list
        The path of the common STCs.
    radius: float (mm)
        The radius of every standardized ROI.
    tmin, tmax: float (s).
        The time range of interest.
    """
    fnlist = get_files_from_list(fn_stc)
    # loop across all filenames
    for fn_stc in fnlist:
        stc_path = fn_stc[:fn_stc.rfind('-')]
        stc = mne.read_source_estimate(fn_stc, subject=min_subject)
        stc = stc.crop(tmin, tmax)
        #min_path = subjects_dir + '/%s' %min_subject
        # extract the subject information from the file name
        source_path = stc_path + '/ROIs/'
        stan_path = stc_path + '/standard/'
        reset_directory(stan_path)
        list_dirs = os.walk(source_path)
        for root, dirs, files in list_dirs:
            for f in files:
                label_fname = os.path.join(root, f)
                label = mne.read_label(label_fname)
                stc_label = stc.in_label(label)
                src_pow = np.sum(stc_label.data ** 2, axis=1)
                if label.hemi == 'lh':
                    # Get the max MNE value within each ROI
                    seed_vertno = stc_label.vertices[0][np.argmax(src_pow)]
                    func_label = mne.grow_labels(min_subject, seed_vertno,
                                                 extents=radius, hemis=0,
                                                 subjects_dir=subjects_dir,
                                                 n_jobs=1)
                    func_label = func_label[0]
                    func_label.save(stan_path + '%s' %f)
                elif label.hemi == 'rh':
                    seed_vertno = stc_label.vertices[1][np.argmax(src_pow)]
                    func_label = mne.grow_labels(min_subject, seed_vertno,
                                                 extents=radius, hemis=1,
                                                 subjects_dir=subjects_dir,
                                                 n_jobs=1)
                    func_label = func_label[0]
                    func_label.save(stan_path + '%s' %f)
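# Usage sketch for apply_stand() above (assumed call): every label found under
# '<stc>/ROIs/' is replaced by a label of fixed radius grown around the vertex
# of maximal source power. The file name is a hypothetical placeholder.
apply_stand(os.environ['SUBJECTS_DIR'] + '/fsaverage/dSPM_ROIs/LLst_avg-lh.stc',  # hypothetical
            radius=8.0, tmin=0.1, tmax=0.5)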
def merge_rois(labels_path_list, group=False, evelist=['LLst','LLrt']):
    """
    Merge ROIs so that overlapping labels are combined into one.
    If 'group' is False, ROIs from all the events are merged and
    saved in the folder 'ROIs' under the 'labels_path'.
    If 'group' is True, ROIs from all the subjects are merged and
    saved in the folder 'merged' under the 'labels_path'.
    ----------
    labels_path_list: the path(s) of all the ROI folders.
    group: if False, merge ROIs from different events within one subject;
           if True, merge ROIs across subjects.
    evelist: event names of the subfolders (used when 'group' is False).
    """
    path_list = get_files_from_list(labels_path_list)
    # loop across all filenames
    for labels_path in path_list:
        import glob, shutil
        if group is False:
            mer_path = labels_path + '/ROIs/'
            reset_directory(mer_path)
            for eve in evelist:
                source_path = labels_path + '/%s' %eve
                for filename in glob.glob(os.path.join(source_path, '*.*')):
                    shutil.copy(filename, mer_path)
        elif group is True:
            mer_path = labels_path + 'merged/'
            reset_directory(mer_path)
            source_path = labels_path + 'standard/' 
            for filename in glob.glob(os.path.join(source_path, '*.*')):
                shutil.copy(filename, mer_path)
        # Merge the individual subject's ROIs
        reducer = True
        count = 1
        while reducer:
            list_dirs = os.walk(mer_path)
            label_list = ['']
            for root, dirs, files in list_dirs:
                for f in files:
                    label_fname = os.path.join(root, f)
                    label_list.append(label_fname)
            label_list = label_list[1:]
            len_class = _cluster_rois(mer_path, label_list, count)
            if len_class == len(label_list):
                reducer = False  
            count = count + 1
def sele_rois(fn_stc_list, fn_src, min_dist, weight, tmin=0.1, tmax=0.5):
    """
    Select ROIs based on the least distance and the difference of Euclidean
    norms between ROI candidates.
    
    Parameters
    ----------
    fn_stc_list: string or list 
        The path of the common STCs.
    fn_src: string
       The path of the common source space, such as: '*/fsaverage/bem/*-src.fif'
    min_dist: int (mm)
        Least distance between ROI candidates.
    weight: float
        Euclidean_norms weight related to the larger candidate's standard deviation.
    tmin, tmax: float (s).
        The time range of interest.
    """
    fn_stc_list = get_files_from_list(fn_stc_list)
    # loop across all filenames
    for fn_stc in fn_stc_list:
        import glob, shutil
        labels_path = fn_stc[:fn_stc.rfind('-')] 
        source_path = labels_path + '/ini/' 
        sel_path = labels_path + '/ROIs/'        
        reset_directory(sel_path)
        for filename in glob.glob(os.path.join(source_path, '*.*')):
            shutil.copy(filename, sel_path)
        reducer = True
        stc = mne.read_source_estimate(fn_stc)
        stc = stc.crop(tmin, tmax)
        src = mne.read_source_spaces(fn_src)
        while reducer:
            list_dirs = os.walk(sel_path)
            label_list = ['']
            for root, dirs, files in list_dirs:
                for f in files:
                    label_fname = os.path.join(root, f)
                    label_list.append(label_fname)
            label_list = label_list[1:]
            len_class = _cluster_sel(sel_path, label_list, stc, src, min_dist, weight)
            if len_class == len(label_list):
                reducer = False  
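# Usage sketch for sele_rois() above (assumed call; paths and parameter values
# are hypothetical placeholders): candidates from '<stc>/ini/' are copied into
# '<stc>/ROIs/' and clustered there with _cluster_sel() using min_dist and
# weight, as described in the docstring.
fn_src = os.environ['SUBJECTS_DIR'] + '/fsaverage/bem/fsaverage-ico-5-src.fif'
sele_rois(os.environ['SUBJECTS_DIR'] + '/fsaverage/dSPM_ROIs/LLst_avg-lh.stc',  # hypothetical
          fn_src, min_dist=20, weight=0.8, tmin=0.1, tmax=0.5)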
def apply_stand(fn_stc, event, radius=5.0, min_subject='fsaverage'):

    """
    ----------
    fn_stc: averaged STC of the trials.
    event: name of the event subfolder that holds the initial ROIs.
    radius: float (mm). The radius of every standardized ROI.
    """
    fnlist = get_files_from_list(fn_stc)
    # loop across all filenames
    for fn_stc in fnlist:
        import glob, shutil
        labels_path = os.path.split(fn_stc)[0]
        stan_path = labels_path + '/%s/standard/' %event
        reset_directory(stan_path)
        source_path = labels_path + '/%s/ini/' %event
        source = glob.glob(os.path.join(source_path, '*.*'))
        for filename in source:
            shutil.copy(filename, stan_path)
        stc = mne.read_source_estimate(fn_stc, subject=min_subject)
        list_dirs = os.walk(stan_path)
        for root, dirs, files in list_dirs:
            for f in files:
                label_fname = os.path.join(root, f)
                label = mne.read_label(label_fname)
                stc_label = stc.in_label(label)
                src_pow = np.sum(stc_label.data ** 2, axis=1)
                if label.hemi == 'lh':
                    # Get the max MNE value within each ROI
                    seed_vertno = stc_label.vertices[0][np.argmax(src_pow)]
                    func_label = mne.grow_labels(min_subject, seed_vertno,
                                                 extents=radius, hemis=0,
                                                 subjects_dir=subjects_dir,
                                                 n_jobs=1)
                    func_label = func_label[0]
                    func_label.save(stan_path + '%s' %f)
                elif label.hemi == 'rh':
                    seed_vertno = stc_label.vertices[1][np.argmax(src_pow)]
                    func_label = mne.grow_labels(min_subject, seed_vertno,
                                                 extents=radius, hemis=1,
                                                 subjects_dir=subjects_dir,
                                                 n_jobs=1)
                    func_label = func_label[0]
                    func_label.save(stan_path + '%s' %f)
def apply_rois(fn_stc, tmin, tmax, thr, min_subject='fsaverage'):

    '''
       Make ROIs using the common STCs.
        
       Parameters
       ----------
       fn_stc: string.
            The path of the common STCs
       tmin, tmax: float (s).
            The time range of interest.
       thr: float or int
            The percentile of STCs strength.
       min_subject: string.
            The common subject.
       
    '''
    stc_avg = mne.read_source_estimate(fn_stc)
    stc_avg = stc_avg.crop(tmin, tmax)
    src_pow = np.sum(stc_avg.data ** 2, axis=1)
    stc_avg.data[src_pow < np.percentile(src_pow, thr)] = 0.
    fn_src = subjects_dir+'/%s/bem/fsaverage-ico-5-src.fif' %min_subject
    src_inv = mne.read_source_spaces(fn_src)
    func_labels_lh, func_labels_rh = mne.stc_to_label(
                    stc_avg, src=src_inv, smooth=True,
                    subjects_dir=subjects_dir,
                    connected=True)
    # Left hemisphere definition
    i = 0
    labels_path = fn_stc[:fn_stc.rfind('-')] + '/ini'
    reset_directory(labels_path)
    while i < len(func_labels_lh):
        func_label = func_labels_lh[i]
        func_label.save(labels_path + '/ROI_%d' %(i))
        i = i + 1
    # right hemisphere definition
    j = 0
    while j < len(func_labels_rh):
        func_label = func_labels_rh[j]
        func_label.save(labels_path + '/ROI_%d' %(j))
        j = j + 1
def apply_sele(fn_stc_list, fn_src, min_dist):
    """
    Select ROIs: the labels in '<labels_path>/ROIs/merge/' are copied into
    '<labels_path>/ROIs/sele/' and repeatedly clustered by distance until the
    label set is stable, so that redundant labels are reduced.
    ----------
    fn_stc_list: path(s) of the common STC(s).
    fn_src: path of the common source space, e.g. '*/fsaverage/bem/*-src.fif'.
    min_dist: int (mm). Least distance between ROI candidates.
    """
    fn_stc_list = get_files_from_list(fn_stc_list)
    # loop across all filenames
    for fn_stc in fn_stc_list:
        import glob, shutil
        labels_path = os.path.split(fn_stc)[0]
        sel_path = labels_path + '/ROIs/sele/' 
        reset_directory(sel_path)
        source_path = labels_path + '/ROIs/merge/' 
        source = glob.glob(os.path.join(source_path, '*.*'))
        for filename in source:
            shutil.copy(filename, sel_path) 
        reducer = True
        stc = mne.read_source_estimate(fn_stc)
        stc.data[stc.data < 0] = 0
        #stc = stc.crop(tmin, tmax)
        src = mne.read_source_spaces(fn_src)
        while reducer:
            list_dirs = os.walk(sel_path)
            label_list = ['']
            for root, dirs, files in list_dirs:
                for f in files:
                    label_fname = os.path.join(root, f)
                    label_list.append(label_fname)
            label_list = label_list[1:]
            len_class = _cluster_rois(sel_path, label_list, stc, src, min_dist)
            if len_class == len(label_list):
                reducer = False  
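# Usage sketch for apply_sele() above (assumed call; the file names and
# min_dist value are hypothetical placeholders): the labels in
# '<labels_path>/ROIs/merge/' are copied to '<labels_path>/ROIs/sele/' and
# clustered by distance.
apply_sele(os.environ['SUBJECTS_DIR'] + '/fsaverage/dSPM_ROIs/203731/203731_LLst-lh.stc',  # hypothetical
           os.environ['SUBJECTS_DIR'] + '/fsaverage/bem/fsaverage-ico-5-src.fif',
           min_dist=20)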
def apply_stand(fn_stc, radius=5.0, min_subject="fsaverage", tmin=0.1, tmax=0.5):

    """
    ----------
    fn_stc: averaged STC of the trials.
    radius: float (mm). The radius of every standardized ROI.
    tmin, tmax: float (s). The time range of interest.
    """
    fnlist = get_files_from_list(fn_stc)
    # loop across all filenames
    for fn_stc in fnlist:
        stc_path = fn_stc[: fn_stc.rfind("-")]
        stc = mne.read_source_estimate(fn_stc, subject=min_subject)
        stc = stc.crop(tmin, tmax)
        # min_path = subjects_dir + '/%s' %min_subject
        # extract the subject information from the file name
        source_path = stc_path + "/ROIs/"
        stan_path = stc_path + "/standard/"
        reset_directory(stan_path)
        list_dirs = os.walk(source_path)
        for root, dirs, files in list_dirs:
            for f in files:
                label_fname = os.path.join(root, f)
                label = mne.read_label(label_fname)
                stc_label = stc.in_label(label)
                src_pow = np.sum(stc_label.data ** 2, axis=1)
                if label.hemi == "lh":
                    # Get the max MNE value within each ROI
                    seed_vertno = stc_label.vertices[0][np.argmax(src_pow)]
                    func_label = mne.grow_labels(
                        min_subject, seed_vertno, extents=radius, hemis=0, subjects_dir=subjects_dir, n_jobs=1
                    )
                    func_label = func_label[0]
                    func_label.save(stan_path + "%s" % f)
                elif label.hemi == "rh":
                    seed_vertno = stc_label.vertices[1][np.argmax(src_pow)]
                    func_label = mne.grow_labels(
                        min_subject, seed_vertno, extents=radius, hemis=1, subjects_dir=subjects_dir, n_jobs=1
                    )
                    func_label = func_label[0]
                    func_label.save(stan_path + "%s" % f)
def apply_merge(labels_path, evt_list):
    import glob, shutil
    mer_path = labels_path + 'ROIs/merge/'
    reset_directory(mer_path)
    source = []
    for evt in evt_list:
        source_path = labels_path + '/%s/ini/' %evt
        source = source + glob.glob(os.path.join(source_path, '*.*'))
    for filename in source:
        shutil.copy(filename, mer_path)
    reducer = True
    while reducer:
        list_dirs = os.walk(mer_path)
        label_list = ['']
        for root, dirs, files in list_dirs:
            for f in files:
                label_fname = os.path.join(root, f)
                label_list.append(label_fname)
        label_list = label_list[1:]
        len_class = _cluster1_rois(mer_path, label_list)
        if len_class == len(label_list):
            reducer = False  
def group_rois(am_sub=0, com_path=None, mer_path=None):
    """
    Choose the common ROIs that appear in at least 'am_sub' subjects.
    ----------
    am_sub: the minimum number of subjects in which an ROI must appear.
    com_path: the directory for the common labels.
    mer_path: the directory of the merged ROIs.
    """
    import shutil
    reset_directory(com_path)
    list_dirs = os.walk(mer_path)
    label_list = ['']
    for root, dirs, files in list_dirs:
        for f in files:
            label_fname = os.path.join(root, f)
            label_list.append(label_fname)
    label_list = label_list[1:]
    for fn_label in label_list:
        fn_name = os.path.basename(fn_label)
        subjects = (fn_name.split('_')[0]).split(',')
        if len(subjects) >= am_sub:
            shutil.copy(fn_label, com_path)
def apply_inverse(fn_epo, event='LLst',ctmin=0.05, ctmax=0.25, nctmin=-0.2, nctmax=0,
                  fmin=4, fmax=8, min_subject='fsaverage', STCs=False):
    """
    Invert the evoked data into source space using the DICS beamformer.
    ----------
    fn_epo : epochs file(s) of the cleaned raw data.
    event: event name related with the epochs.
    ctmin, ctmax: time range for computing the data CSD.
    nctmin, nctmax: time range for computing the noise CSD.
    fmin: min value of the frequency band of interest.
    fmax: max value of the frequency band of interest.
    min_subject: the subject for the common brain space.
    STCs: bool, whether to write per-trial STCs of the epochs.
    """
    from mne import Epochs, pick_types
    from mne.io import Raw
    from mne.event import make_fixed_length_events
    fnlist = get_files_from_list(fn_epo)
    # loop across all filenames
    for fname in fnlist:
        subjects_dir = os.environ['SUBJECTS_DIR']
        # extract the subject information from the file name
        meg_path = os.path.split(fname)[0]
        name = os.path.basename(fname)
        stc_name = name[:name.rfind('-epo.fif')] 
        subject = name.split('_')[0]
        subject_path = subjects_dir + '/%s' %subject
        min_dir = subjects_dir + '/%s' %min_subject
        fn_trans = meg_path + '/%s-trans.fif' % subject
        fn_src = subject_path + '/bem/%s-ico-4-src.fif' % subject
        fn_bem = subject_path + '/bem/%s-5120-5120-5120-bem-sol.fif' % subject
        # Make sure the target path exists
        stc_path = min_dir + '/DICS_ROIs/%s' % subject
        set_directory(stc_path)
        # Read the MNI source space
        epochs = mne.read_epochs(fname)
        evoked = epochs.average()
        forward = mne.make_forward_solution(epochs.info, trans=fn_trans,
                                            src=fn_src, bem=fn_bem,
                                            fname=None, meg=True, eeg=False,
                                            mindist=5.0, n_jobs=2,
                                            overwrite=True)
        forward = mne.convert_forward_solution(forward, surf_ori=True)
        from mne.time_frequency import compute_epochs_csd
        from mne.beamformer import dics
        data_csd = compute_epochs_csd(epochs, mode='multitaper', tmin=ctmin, tmax=ctmax, 
                                      fmin=fmin, fmax=fmax)

        noise_csd = compute_epochs_csd(epochs, mode='multitaper', tmin=nctmin, tmax=nctmax,
                                           fmin=fmin, fmax=fmax)
                
        stc = dics(evoked, forward, noise_csd, data_csd)
        from mne import morph_data
        stc_morph = morph_data(subject, min_subject, stc, grade=4, smooth=4)
        stc_morph.save(stc_path + '/%s_%d_%d' % (stc_name, fmin, fmax), ftype='stc')
        if STCs:
            stcs_path = stc_path + '/STCs-%s/' %event
            reset_directory(stcs_path)
            stcs = dics(epochs, forward, noise_csd, data_csd)
            s = 0
            while s < len(stcs):
                stc_morph = mne.morph_data(subject, min_subject, stcs[s], grade=4, smooth=4)
                stc_morph.save(stcs_path + '/trial_%s'
                                % (str(s)), ftype='stc')
                s = s + 1
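# Usage sketch for the DICS apply_inverse() above (assumed call; the epochs
# glob pattern is a hypothetical placeholder). Note that compute_epochs_csd and
# dics(evoked, ...) follow the older MNE-Python API that this file targets.
epo_list = glob.glob(os.environ['SUBJECTS_DIR'] + '/*/MEG/*evtW_LLst_bc-epo.fif')  # hypothetical
apply_inverse(epo_list, event='LLst', ctmin=0.05, ctmax=0.25, nctmin=-0.2, nctmax=0,
              fmin=4, fmax=8, STCs=False)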
def apply_rois(fn_stc, event, tmin=0.0, tmax=0.3, tstep=0.05, window=0.2, 
                min_subject='fsaverage', thr=99):
    """
    Compute regions of interest (ROI) based on events
    ----------
    fn_stc : string or list
        evoked and morphed STC file(s).
    event: string
        event of the related STC.
    tmin, tmax: float
        segment for ROIs definition.
    tstep, window: float
        step and length (s) of the sliding analysis window.
    min_subject: string
        the subject as the common brain space.
    thr: float or int
        threshold of STC used for ROI identification.
    """
    from scipy.signal import detrend
    from scipy.stats.mstats import zscore 
    fnlist = get_files_from_list(fn_stc)
    # loop across all filenames
    for ifn_stc in fnlist:
        subjects_dir = os.environ['SUBJECTS_DIR']
        # extract the subject information from the file name
        stc_path = os.path.split(ifn_stc)[0]
        #name = os.path.basename(fn_stc)
        #tri = name.split('_')[1].split('-')[0]
        min_path = subjects_dir + '/%s' % min_subject
        fn_src = min_path + '/bem/fsaverage-ico-4-src.fif'
        # Make sure the target path exists
        labels_path = stc_path + '/%s/' %event
        reset_directory(labels_path)
        # Read the MNI source space
        src_inv = mne.read_source_spaces(fn_src)
        stc = mne.read_source_estimate(ifn_stc, subject=min_subject)
        #stc = stc.crop(tmin, tmax)
        #src_pow = np.sum(stc.data ** 2, axis=1)
        #stc.data[src_pow < np.percentile(src_pow, thr)] = 0.
        stc = stc.crop(tmin, tmax)
        cal_data = stc.data
        dt_data = detrend(cal_data, axis=-1)
        zc_data = zscore(dt_data, axis=-1)
        src_pow = np.sum(zc_data ** 2, axis=1)
        stc.data[src_pow < np.percentile(src_pow, thr)] = 0.
        tbeg = tmin
        count = 1
        while tbeg < tmax:
            tend = tbeg + window
            if tend > tmax:
                break
            win_stc = stc.copy().crop(tbeg, tend)
            stc_data = win_stc.data 
            src_pow = np.sum(stc_data ** 2, axis=1)
            win_stc.data[src_pow < np.percentile(src_pow, thr)] = 0.
            func_labels_lh, func_labels_rh = mne.stc_to_label(
                win_stc, src=src_inv, smooth=True,
                subjects_dir=subjects_dir,
                connected=True)
            # Left hemisphere definition
            i = 0
            while i < len(func_labels_lh):
                func_label = func_labels_lh[i]
                func_label.save(labels_path + '%s_%s_win%d' % (event, str(i), count))
                i = i + 1
            # right hemisphere definition
            j = 0
            while j < len(func_labels_rh):
                func_label = func_labels_rh[j]
                func_label.save(labels_path +  '%s_%s_win%d' % (event, str(j), count))
                j = j + 1
            tbeg = tbeg + tstep
            count = count + 1
def apply_sigSTC(fn_list_v, vevent, mevent, method='dSPM', vtmin=0., vtmax=0.35,
                 mtmin=-0.3, mtmax=0.05, radius=10.0):
    from mne import spatial_tris_connectivity, grade_to_tris
    from mne.stats import spatio_temporal_cluster_test
    from scipy import stats as stats
    X1, X2 = [], []
    stcs_trial = []
    for fn_v in fn_list_v:
        fn_m = fn_v[: fn_v.rfind('evtW')] + 'evtW_%s_bc_norm_1-lh.stc' %mevent
        stc_v = mne.read_source_estimate(fn_v)
        stcs_trial.append(stc_v.copy())
        stc_m = mne.read_source_estimate(fn_m)
        stc_v.resample(200)
        stc_m.resample(200)
        X1.append(stc_v.copy().crop(vtmin, vtmax).data)
        X2.append(stc_m.copy().crop(mtmin, mtmax).data)
    stcs_path = subjects_dir+'/fsaverage/%s_ROIs/conditions/' %method
    reset_directory(stcs_path)
    fn_avg = stcs_path + '%s' %(vevent)
    stcs = np.array(stcs_trial)
    stc_avg = np.sum(stcs, axis=0)/stcs.shape[0]
    stc_avg.save(fn_avg, ftype='stc')    
    X1 = np.array(X1).transpose(0, 2, 1)
    X2 = np.array(X2).transpose(0, 2, 1)
    ###############################################################################
    # Compute statistic
    
    #    To use an algorithm optimized for spatio-temporal clustering, we
    #    just pass the spatial connectivity matrix (instead of spatio-temporal)
    print('Computing connectivity.')
    connectivity = spatial_tris_connectivity(grade_to_tris(5))
    
    #    Note that X needs to be a list of multi-dimensional array of shape
    #    samples (subjects_k) x time x space, so we permute dimensions
    X = [X1, X2]
    #    Now let's actually do the clustering. This can take a long time...
    #    Here we set the threshold quite high to reduce computation.
    p_threshold = 0.0001
    f_threshold = stats.distributions.f.ppf(1. - p_threshold / 2.,
                                        X1.shape[0] - 1, X1.shape[0] - 1)
    print('Clustering.')
   
    clu = spatio_temporal_cluster_test(X, connectivity=connectivity, n_jobs=2,
                                    threshold=f_threshold)
    #    Now select the clusters that are sig. at p < 0.05 (note that this value
    #    is multiple-comparisons corrected).
    #fsave_vertices = [np.arange(10242), np.arange(10242)]
    tstep = stc_v.tstep
    #stc_all_cluster_vis = summarize_clusters_stc(clu, tstep=tstep,
    #                                            vertices=fsave_vertices,
    #                                            subject='fsaverage')
    #stc_sig = stc_all_cluster_vis.mean()
    #fn_sig = subjects_dir+'/fsaverage/%s_ROIs/%s' %(method,vevent)
    #stc_sig.save(fn_sig)
    tstep = stc_v.tstep
    T_obs, clusters, clu_pvals, _ = clu
    n_times, n_vertices = T_obs.shape
    good_cluster_inds = np.where(clu_pvals < 0.05)[0]
    seeds = []
    #  Build a convenient representation of each cluster, where each
    #  cluster becomes a "time point" in the SourceEstimate
    T_obs = abs(T_obs)
    if len(good_cluster_inds) > 0:
        data = np.zeros((n_vertices, n_times))
        for cluster_ind in good_cluster_inds:
            data.fill(0)
            v_inds = clusters[cluster_ind][1]
            t_inds = clusters[cluster_ind][0]
            data[v_inds, t_inds] = T_obs[t_inds, v_inds]
            # Store a nice visualization of the cluster by summing across time
            data = np.sign(data) * np.logical_not(data == 0) * tstep
            seed = np.argmax(data.sum(axis=-1))
            seeds.append(seed)
    min_subject = 'fsaverage'
    labels_path = subjects_dir + '/fsaverage/dSPM_ROIs/%s/ini' %vevent
    reset_directory(labels_path)
    seeds = np.array(seeds)
    non_index_lh = seeds[seeds < 10242]
    if non_index_lh.size > 0:
        func_labels_lh = mne.grow_labels(min_subject, non_index_lh,
                                        extents=radius, hemis=0, 
                                        subjects_dir=subjects_dir, n_jobs=1)
        i = 0
        while i < len(func_labels_lh):
            func_label = func_labels_lh[i]
            func_label.save(labels_path + '/%s_%d' %(vevent, i))
            i = i + 1
            
    seeds_rh = seeds - 10242
    non_index_rh = seeds_rh[seeds_rh >= 0]
    if non_index_rh.size > 0:
        func_labels_rh = mne.grow_labels(min_subject, non_index_rh,
                                        extents=radius, hemis=1,
                                        subjects_dir=subjects_dir, n_jobs=1)                                             
   
        # right hemisphere definition
        j = 0
        while j < len(func_labels_rh):
            func_label = func_labels_rh[j]
            func_label.save(labels_path + '/%s_%d' %(vevent, j))
            j = j + 1
stmin, stmax = 0.0, 0.4
rtmin, rtmax = -0.4, 0.0 
subjects_dir = os.environ['SUBJECTS_DIR']
method = 'dSPM'
#calculate noise cov from empty room file
#emp_list = glob.glob(subjects_dir+'/*/MEG/*empty-raw.fif')
#apply_create_noise_covariance(emp_list)
#inverse epochs into the source space
epo_st_list = glob.glob(subjects_dir+'/*/MEG/*evtW_%s_bc-epo.fif' %evt_st)
epo_rt_list = glob.glob(subjects_dir+'/*/MEG/*evtW_%s_bc-epo.fif' %evt_rt)
apply_inverse(epo_st_list[:], method=method, event=evt_st)
apply_inverse(epo_rt_list[:], method=method, event=evt_rt)
#make ROIs for each specific event
stc_st_list = glob.glob(subjects_dir+'/fsaverage/%s_ROIs/*/*,evtW_%s_bc-lh.stc' % (method, evt_st))
stc_rt_list = glob.glob(subjects_dir+'/fsaverage/%s_ROIs/*/*,evtW_%s_bc-lh.stc' % (method, evt_rt))
apply_rois(stc_st_list, event=evt_st, tmin=stmin, tmax=stmax)
apply_rois(stc_rt_list, event=evt_rt, tmin=rtmin, tmax=rtmax)
#merge the different kinds of ROIs for each subject
labels_path = glob.glob(subjects_dir+'/fsaverage/%s_ROIs/*[0-9]' %method) 
merge_rois(labels_path, group=False, evelist=['LLst','LLrt'])
#standardize the size of the ROIs and integrate all the subjects' ROIs
stan_path = subjects_dir+'/fsaverage/%s_ROIs/standard/' %method
reset_directory(stan_path)
stan_rois(stc_st_list, stan_path, size=8.0)
#merge ROIs across subjects, and select the common ROIs
labels_path = subjects_dir+'/fsaverage/%s_ROIs/' %method
merge_rois(labels_path, group=True)
mer_path = subjects_dir+'/fsaverage/%s_ROIs/merged/' %method
com_path = subjects_dir+'/fsaverage/%s_ROIs/common/' %method
group_rois(am_sub=8, com_path=com_path, mer_path=mer_path)
def apply_inverse(fnepo, method='dSPM', event='LLst', min_subject='fsaverage', STC_US='ROI', 
                  snr=5.0):
    '''  
        Parameters
        ----------
        fnepo: string or list
            The epochs file(s), cleaned of ECG, EOG and environmental noise.
        method: inverse method, 'MNE' or 'dSPM'.
        event: string
            The event name related with the epochs.
        min_subject: string
            The subject name used as the common brain.
        STC_US: string
            How the inverse solution is used for further analysis:
            'ROI' stands for ROI definition, 'CAU' for causality analysis.
        snr: signal-to-noise ratio for the inverse solution.
    '''
    #Get the default subjects_dir
    from mne.minimum_norm import (apply_inverse, apply_inverse_epochs)
    subjects_dir = os.environ['SUBJECTS_DIR']
    fnlist = get_files_from_list(fnepo)
    # loop across all filenames
    for fname in fnlist:
        fn_path = os.path.split(fname)[0]
        name = os.path.basename(fname)
        stc_name = name[:name.rfind('-epo.fif')] 
        subject = name.split('_')[0]
        subject_path = subjects_dir + '/%s' %subject
        min_dir = subjects_dir + '/%s' %min_subject
        fn_trans = fn_path + '/%s-trans.fif' % subject
        fn_cov = fn_path + '/%s_empty,nr-cov.fif' % subject
        fn_src = subject_path + '/bem/%s-ico-4-src.fif' % subject
        fn_bem = subject_path + '/bem/%s-5120-5120-5120-bem-sol.fif' % subject
        lambda2 = 1.0 / snr ** 2
        #noise_cov = mne.read_cov(fn_cov)
        epochs = mne.read_epochs(fname)
        noise_cov = mne.read_cov(fn_cov)
        if STC_US == 'ROI':
            # this path used for ROI definition
            stc_path = min_dir + '/%s_ROIs/%s' %(method,subject)
            #fn_cov = meg_path + '/%s_empty,fibp1-45,nr-cov.fif' % subject
            evoked = epochs.average()
            set_directory(stc_path)
            noise_cov = mne.cov.regularize(noise_cov, evoked.info,
                                            mag=0.05, grad=0.05, proj=True)
            fwd_ev = mne.make_forward_solution(evoked.info, trans=fn_trans,
                                                src=fn_src, bem=fn_bem,
                                                fname=None, meg=True, eeg=False,
                                                mindist=5.0, n_jobs=2,
                                                overwrite=True)
            fwd_ev = mne.convert_forward_solution(fwd_ev, surf_ori=True)
            forward_meg_ev = mne.pick_types_forward(fwd_ev, meg=True, eeg=False)
            inverse_operator_ev = mne.minimum_norm.make_inverse_operator(
                evoked.info, forward_meg_ev, noise_cov,
                loose=0.2, depth=0.8)
            # Compute inverse solution
            stc = apply_inverse(evoked, inverse_operator_ev, lambda2, method,
                                pick_ori=None)
            # Morph STC
            subject_id = min_subject
            stc_morph = mne.morph_data(subject, subject_id, stc, grade=5, smooth=5)
            stc_morph.save(stc_path + '/%s' % (stc_name), ftype='stc')
    
        elif STC_US == 'CAU':
            stcs_path = min_dir + '/stcs/%s/%s/' % (subject,event)
            reset_directory(stcs_path)
            noise_cov = mne.cov.regularize(noise_cov, epochs.info,
                                            mag=0.05, grad=0.05, proj=True)
            fwd = mne.make_forward_solution(epochs.info, trans=fn_trans,
                                            src=fn_src, bem=fn_bem,
                                            meg=True, eeg=False, mindist=5.0,
                                            n_jobs=2, overwrite=True)
            fwd = mne.convert_forward_solution(fwd, surf_ori=True)
            forward_meg = mne.pick_types_forward(fwd, meg=True, eeg=False)
            inverse_operator = mne.minimum_norm.make_inverse_operator(
                epochs.info, forward_meg, noise_cov, loose=0.2,
                depth=0.8)
            # Compute inverse solution
            stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2,
                                        method=method, pick_ori='normal')
            s = 0
            while s < len(stcs):
                stc_morph = mne.morph_data(
                    subject, min_subject, stcs[s], grade=5, smooth=5)
                stc_morph.save(stcs_path + '/trial%s_fsaverage'
                               % (str(s)), ftype='stc')
                s = s + 1
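# Usage sketch for the dSPM apply_inverse() above (assumed call; the epochs
# glob pattern is a hypothetical placeholder): STC_US='ROI' averages the epochs
# and writes one morphed STC per subject for ROI definition, while STC_US='CAU'
# writes one morphed STC per trial for the causality analysis.
epo_list = glob.glob(os.environ['SUBJECTS_DIR'] + '/*/MEG/*evtW_LLst_bc-epo.fif')  # hypothetical
apply_inverse(epo_list, method='dSPM', event='LLst', STC_US='ROI')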
import numpy as np
from dirs_manage import reset_directory
subjects_dir = os.environ['SUBJECTS_DIR']
min_subject = 'fsaverage'
method = 'dSPM'
event = 'LLst'
tmin, tmax = 0.0, 0.6
window, tstep = 0.2, 0.05
thr = 99
fn_stc = '/home/uais/data/Chrono/18subjects/fsaverage/dSPM_ROIs/203731/203731_Chrono,nr,ocarta,evtW_LLst_bc-lh.stc'
stc_path = os.path.split(fn_stc)[0]
min_path = subjects_dir + '/%s' % min_subject
fn_src = min_path + '/bem/fsaverage-ico-5-src.fif'
# Make sure the target path exists
labels_path = stc_path + '/%s/' %event
reset_directory(labels_path)
# Read the MNI source space
src_inv = mne.read_source_spaces(fn_src)
stc = mne.read_source_estimate(fn_stc, subject=min_subject)
stc = stc.crop(tmin, tmax)
src_pow = np.sum(stc.data ** 2, axis=1)
stc.data[src_pow < np.percentile(src_pow, thr)] = 0.
tbeg = tmin
count = 1
while tbeg < tmax:
    tend = tbeg + window
    if tend > tmax:
        break
    win_stc = stc.copy().crop(tbeg, tend)
    stc_data = win_stc.data 
    src_pow = np.sum(stc_data ** 2, axis=1)