def SubjectShuffleSplit(dataset,
                        groups,
                        n_iter=100,
                        test_size=.3,
                        random_state=42):
    """Subject-level ShuffleSplit.

    Folds are drawn over *unique subjects* (so no subject appears in both
    sides of a split), then expanded back to image indices: every image of
    a training subject goes to the train set, every image of a test subject
    to the test set.
    """
    group_indices = set_group_indices(dataset.dx_group)
    selection = np.hstack([group_indices[g] for g in groups])

    # restrict to the requested diagnosis groups
    subjects = np.asarray(dataset.subjects)[selection]
    unique_subjects = np.unique(subjects)

    splitter = ShuffleSplit(len(unique_subjects),
                            n_iter=n_iter,
                            test_size=test_size,
                            random_state=random_state)

    def _image_indices(subject_subset):
        # Map subject ids back to the indices of all of their images,
        # visiting subjects in the order the fold lists them.
        pieces = [np.where(subjects == s)[0] for s in subject_subset]
        return np.concatenate([np.array([], dtype=int)] + pieces)

    folds = []
    for train, test in splitter:
        folds.append([_image_indices(unique_subjects[train]),
                      _image_indices(unique_subjects[test])])
    return folds
def StratifiedSubjectShuffleSplit(dataset,
                                  groups,
                                  n_iter=100,
                                  test_size=.3,
                                  random_state=42):
    """Stratified ShuffleSplit over subjects.

    Stratification is done on the dx label of each unique subject; train and
    test sizes in *images* may therefore vary with the number of acquisitions
    per subject.
    """
    group_indices = set_group_indices(dataset.dx_group)
    selection = np.hstack([group_indices[g] for g in groups])

    # restrict images and labels to the requested diagnosis groups
    subjects = np.asarray(dataset.subjects)[selection]
    dx = np.asarray(dataset.dx_group)[selection]

    # one entry per subject: id + the dx label of its first acquisition
    unique_ids, first_occurrence = np.unique(subjects, return_index=True)
    labels = dx[first_occurrence]

    # folds stratified on the per-subject dx label
    splitter = StratifiedShuffleSplit(labels,
                                      n_iter=n_iter,
                                      test_size=test_size,
                                      random_state=random_state)

    folds = []
    for tr, ts in splitter:
        # expand each subject id into the indices of all its images
        train = [i for s in unique_ids[tr] for i in np.where(subjects == s)[0]]
        test = [i for s in unique_ids[ts] for i in np.where(subjects == s)[0]]
        folds.append([train, test])
    return folds
 def __init__(self,
              imgs,
              mask,
              atlas,
              dx_group,
              rois=False,
              n_iter=100,
              memory=CACHE_DIR,
              memory_level=2,
              n_jobs=-1,
              random_state=42):
     """Store the configuration and precompute the per-group index lookup."""
     # data inputs
     self.imgs = np.array(imgs)
     self.mask = mask
     self.atlas = atlas
     self.rois = rois
     # indices of each diagnosis group, keyed by dx label
     self.idx = set_group_indices(dx_group)
     # resampling / caching / parallelism settings
     self.n_iter = n_iter
     self.random_state = random_state
     self.memory = memory
     self.memory_level = memory_level
     self.n_jobs = n_jobs
# --- code example #4 ("コード例 #4": page marker left over from the scraped listing; stray "0" removed) ---
                    metric=metric,
                    mask=mask,
                    detrend=True,
                    low_pass=.1,
                    high_pass=.01,
                    t_r=3.,
                    resampling_target='data',
                    smoothing_fwhm=6.,
                    memory=CACHE_DIR,
                    memory_level=2,
                    n_jobs=20)

# Fit the connectivity estimator on the functional images and cache the
# resulting feature matrix (one row per image, vectorized lower triangle).
fc = conn.fit(dataset.func)
np.savez_compressed('longitudinal_dartel_fc', data=fc)

idx = set_group_indices(dataset.dx_group)
groups = [['AD', 'MCI'], ['AD', 'Normal'], ['MCI', 'Normal']]

# ROI geometry does not depend on the group pair: compute it once,
# outside the loop (it was recomputed on every iteration).
n_rois = conn.rois['n_rois']
centroids = conn.rois['rois_centroids']
ind = np.tril_indices(n_rois, k=-1)

for g in groups:
    # Two-sample t-test on connectivity values between the two groups.
    t, p = stats.ttest_ind(fc[idx[g[0]]], fc[idx[g[1]]])
    # Work on a copy: the original `tv = t` aliased `t` and clobbered it
    # in place when thresholding.
    tv = t.copy()
    tv[p > .01] = 0  # keep only edges significant at p <= .01

    # Rebuild the symmetric n_rois x n_rois matrix from the vectorized
    # lower triangle and mirror it across the diagonal.
    # NOTE(review): the lower and upper triangles do not overlap, so the
    # .5 factor halves the t-values — presumably only affects the plot's
    # color/thickness scale, but confirm this is intended.
    m = np.zeros((n_rois, n_rois))
    m[ind] = tv
    m = .5 * (m + m.T)
    plot_connectome(m, centroids, title='_'.join(g))