def parse_z(path, conditions):
    """
    Collect z-map image paths from a directory tree of GLM results.

    Parameters
    ----------

    path: str
          path to the dir with the images. No files besides the images can be
          in the directory. Images must be .nii or .nii.gz images

    conditions: list of str
                labels to search for in the target folders; a file is kept
                when its name contains ``<condition>-others.nii.gz``

    Returns
    -------

    filenames: list
               list of paths the z-maps of the directory
    """

    # Get all (subject, session) pairs for the two audio tasks.
    task_list = ['audio1', 'audio2']

    filenames = []

    for task in task_list:
        session_list = sorted(get_subject_session(task))

        for sub, ses in session_list:
            session_path = os.path.join(path, sub, ses)
            if not os.path.exists(session_path):
                print("Folder not found for {}, {}. Skipping...".format(sub, ses))
                continue

            print("Code reached for {}, {}".format(sub, ses))
            # Each run's z-score maps live under its own stats folder.
            ses_glob = glob.glob(
                os.path.join(session_path, "res_stats_audio_*_*/z_score_maps"))
            for run_glob in ses_glob:
                try:
                    file_list = [os.path.join(run_glob, file) for
                                 file in os.listdir(run_glob) if
                                 any(x + '-others.nii.gz' in file for x in conditions)]
                # BUG FIX: os.listdir raises OSError (not IndexError) when the
                # directory disappears between glob and listing, so the
                # original `except IndexError` could never fire; IndexError is
                # kept for backward compatibility with the original intent.
                except (IndexError, OSError):
                    print("Found empty folder for {}, {}. Skipping...".format(sub, ses))
                    continue

                filenames.extend(file_list)

    return filenames
# ---- Example #2 (scraped-snippet separator; original text "Пример #2", score 0) ----
    # NOTE(review): fragment — the enclosing function's `def` line was lost in
    # extraction; `output_name` is presumably its parameter (a jobfile path).
    # Read the jobfile
    list_subjects, params = _generate_preproc_pipeline(output_name)
    # Preproc and Dump data
    subject_data = do_subjects_preproc(output_name, report=True)
    return subject_data


if __name__ == '__main__':
    # correction of distortion_parameters
    # custom solution, to be improved in the future
    # Fixed IBC dataset locations on the lab's network share.
    main_dir = '/neurospin/ibc/'
    cache_dir = '/neurospin/tmp/ibc'
    prepare_derivatives(main_dir)
    do_topup = True
    protocol = 'biological_motion'
    subject_session = sorted(get_subject_session([protocol]))
    # subject_session = [('sub-04', 'ses-28'), ('sub-05', 'ses-28'),]
    if do_topup:
        # Pick the acquisition tag by protocol (presumably the multiband
        # factor of the sequence — confirm against apply_topup's expectations);
        # other protocols run with acq=None.
        acq = None
        if protocol in ['rs']:
            acq = 'mb6'
        elif protocol in ['mtt1', 'mtt2']:
            acq = 'mb3'
        apply_topup(main_dir, cache_dir, subject_session, acq=acq)

    subject_data = []
    jobfile = 'ini_files/IBC_preproc_%s.ini' % protocol
    # Preprocess every (subject, session) pair, three workers in parallel.
    subject_data_ = Parallel(n_jobs=3)(
        delayed(run_subject_preproc)(jobfile, subject, session)
        for subject, session in subject_session)
    # NOTE(review): only the first worker's result is kept ([0]); if all
    # results are wanted this should probably extend with the whole list —
    # confirm.
    subject_data = subject_data + subject_data_[0]
# ---- Example #3 (scraped-snippet separator; original text "Пример #3", score 0) ----
                            smooth=smooth,
                            mask_img=mask_img)
            else:
                first_level(subject,
                            compcorr=True,
                            smooth=smooth,
                            mask_img=mask_img)
                fixed_effects_analysis(subject, mask_img=mask_img)


if __name__ == '__main__':
    prepare_derivatives(IBC)
    protocols = ['mtt1']
    for protocol in protocols:
        jobfile = 'ini_files/IBC_preproc_%s.ini' % protocol
        subject_session = get_subject_session(protocol)
        # NOTE(review): the lookup above is immediately overwritten — the
        # hard-coded pair below restricts this run to a single subject/session.
        subject_session = [('sub-08', 'ses-11')]

        # Low-resolution GLM with 5 mm smoothing, sequentially (n_jobs=1).
        Parallel(n_jobs=1)(delayed(run_subject_glm)(
            jobfile, protocol, subject, session, lowres=True, smooth=5)
                           for (subject, session) in subject_session)

    smooth = 5
    for protocol in protocols:
        jobfile = 'ini_files/IBC_preproc_%s.ini' % protocol
        # NOTE(review): here `smooth` is passed positionally, unlike the
        # keyword call above — confirm it binds to the intended parameter of
        # run_subject_glm. Also reuses `subject_session` from the loop above.
        Parallel(n_jobs=4)(delayed(run_subject_glm)(jobfile, protocol, subject,
                                                    session, smooth)
                           for (subject, session) in subject_session)

    smooth = None
    for protocol in protocols:
                                   t_r=t_r,
                                   use_3mm=use_3mm,
                                   glm_mode=glm_mode)

    cv_score = decode_inter(imgs, names, mask, pipeline, conf_matrix=conf_matrix)

    return cv_score


# Model: linear SVM preceded by ANOVA feature selection (top 25 % of voxels).
svc = LinearSVC(max_iter=10000)
feature_selection = SelectPercentile(f_classif, percentile=25)
pipeline = Pipeline([('anova', feature_selection), ('svc', svc)])

# Subjects: (subject, session) lists for the two tasks under comparison.
session_1 = sorted(get_subject_session([task_list[0]]))
session_2 = sorted(get_subject_session([task_list[1]]))

# NOTE(review): assumes both sorted lists are aligned by subject so zip()
# pairs each subject's two sessions together — confirm.
session_list = [(sub1, (ses1, ses2)) for
                (sub1, ses1), (sub2, ses2) in zip(session_1, session_2)]

# Classification
# NOTE(review): this call is truncated by extraction — the closing
# parenthesis (and possibly more arguments) lies outside this snippet.
scores = make_decode_inter(session_list,
                           conditions,
                           data_dir=data_dir,
                           write_dir=write_dir,
                           mask=mask_gm,
                           t_r=t_r,
                           pipeline=pipeline,
                           use_3mm=False,
                           glm_mode=glm_mode,
# ---- Example #5 (scraped-snippet separator; original text "Пример #5", score 0) ----
    # NOTE(review): fragment — the enclosing function's `def` line was lost in
    # extraction; `output_name` is presumably its parameter (a jobfile path).
    list_subjects, params = _generate_preproc_pipeline(output_name)
    # Preproc and Dump data
    subject_data = do_subjects_preproc(output_name, report=True)
    return subject_data


if __name__ == '__main__':
    # correction of distortion_parameters
    # custom solution, to be improved in the future
    # Fixed IBC dataset locations on the lab's network share.
    main_dir = '/neurospin/ibc/'
    cache_dir = '/neurospin/tmp/ibc'
    prepare_derivatives(main_dir)

    do_topup = True
    protocol = 'clips4'  # 'clips1', 'clips2', 'clips3', 'clips4', 'archi', 'hcp2' 'tom' 'preferences'
    # Only the last (subject, session) pair after sorting is processed here.
    subject_session = sorted(get_subject_session(protocol))[-1:]

    if do_topup:
        # Pick the acquisition tag by protocol (presumably the multiband
        # factor — confirm); other protocols pass acq=None.
        acq = None
        if protocol in ['rs']:
            acq = 'mb6'
        elif protocol in ['mtt1', 'mtt2']:
            acq = 'mb3'
        apply_topup(main_dir, cache_dir, subject_session, acq=acq)

    subject_data = []
    # NOTE(review): single-element loop — apparently kept from a
    # multi-protocol version of this script.
    for protocol in [protocol]:
        jobfile = 'ini_files/IBC_preproc_%s.ini' % protocol
        subject_data_ = Parallel(n_jobs=1)(
            delayed(run_subject_preproc)(jobfile, subject, session)
            for subject, session in subject_session)
# ---- Example #6 (scraped-snippet separator; original text "Пример #6", score 0) ----
    # NOTE(review): fragment — the enclosing function's `def` line was lost in
    # extraction; `output_name` is presumably its parameter (a jobfile path).
    # Read the jobfile
    list_subjects, params = _generate_preproc_pipeline(output_name)
    # Preproc and Dump data
    subject_data = do_subjects_preproc(output_name, report=True)
    return subject_data


if __name__ == '__main__':
    # correction of distortion_parameters
    # custom solution, to be improved in the future
    # Fixed IBC dataset locations on the lab's network share.
    main_dir = '/neurospin/ibc/'
    cache_dir = '/neurospin/tmp/ibc'
    prepare_derivatives(main_dir)
    do_topup = True
    protocol = 'BBT3'
    # NOTE(review): the protocol label is upper-case but the session lookup
    # uses lower-case 'bbt3' — confirm both spellings are intentional
    # (the jobfile name below uses 'BBT3').
    subject_session = sorted(get_subject_session(['bbt3']))
    # subject_session = [('sub-15', 'ses-29')]
    if do_topup:
        # Pick the acquisition tag by protocol (presumably the multiband
        # factor — confirm); other protocols pass acq=None.
        acq = None
        if protocol in ['rs']:
            acq = 'mb6'
        elif protocol in ['mtt1', 'mtt2']:
            acq = 'mb3'
        apply_topup(main_dir, cache_dir, subject_session, acq=acq)

    subject_data = []
    jobfile = 'ini_files/IBC_preproc_%s.ini' % protocol
    subject_data_ = Parallel(n_jobs=1)(
        delayed(run_subject_preproc)(jobfile, subject, session)
        for subject, session in subject_session)
    # NOTE(review): only the first result is kept ([0]); confirm whether the
    # whole list was intended.
    subject_data = subject_data + subject_data_[0]
# ---- Example #7 (scraped-snippet separator; original text "Пример #7", score 0) ----
    # NOTE(review): fragment — the enclosing function's `def` line was lost in
    # extraction; rs_masker, roi_masker and rs_fmri come from that lost scope.
    X = rs_masker.transform(rs_fmri)
    seeds = roi_masker.transform(rs_fmri)
    # Center and scale along axis 1, then divide by sqrt(X.shape[0]) so that
    # the dot product below yields correlation-like values. Axis semantics
    # depend on the maskers' output layout (presumably samples x features —
    # TODO confirm).
    normalization = np.sqrt(X.shape[0])
    X = (X.T - X.mean(1)).T
    X = (X.T / X.std(1)).T / normalization
    seeds = (seeds.T - seeds.mean(1)).T
    seeds = (seeds.T / seeds.std(1)).T / normalization
    # Seed-to-voxel correlation matrix.
    correlation = np.dot(seeds.T, X)
    return correlation


# Accumulate seed-based connectivity matrices over subjects, tasks and
# phase-encoding directions ('ap'/'pa'), plus a running mean over
# (subject, task) pairs.
q = 0
correlations = []
for subject in SUBJECTS:
    for task in ['mtt1', 'mtt2']:
        subject_session = np.array(get_subject_session(task))
        # Session id of this subject for the current task (first match).
        session = subject_session[subject_session[:, 0] == subject][0][1]
        for acq in ['ap', 'pa']:
            print(subject, session, acq)
            correlation = compute_connectivity(subject, session, acq)
            correlations.append(correlation)
        # NOTE(review): only the last acq's matrix ('pa') enters the mean,
        # and q counts (subject, task) pairs rather than runs — confirm this
        # is intended.
        if q == 0:
            # BUG FIX: copy() so the in-place += and /= below do not mutate
            # the array already appended to `correlations` (the original
            # aliased it, silently corrupting the stored matrix).
            mean_correlation = correlation.copy()
        else:
            mean_correlation += correlation
        q += 1
mean_correlation /= q
correlations = np.array(correlations)
# Bring the original axis 1 to the front for downstream statistics.
X = np.rollaxis(correlations, 1)
import scipy.stats as sts