Exemplo n.º 1
0
    rois = json.load(f)

# Representative MMP parcel value per ROI (e.g. for plotting colors).
# NOTE(review): reads rois['EVC'] here, BEFORE 'EVC' is restructured/popped
# below — statement order matters.
roi_colors = {'EAC': rois['EAC']['A1'], 'V1': rois['EVC']['V1']}

# Collapse the multi-area EVC entry into a V1-only ROI
rois['V1'] = {'V1': 1}
rois.pop('EVC')

# Create separate masks
masks = {}
for roi in rois:
    # Binary surface mask: 1 wherever the MMP parcellation matches any
    # of this ROI's constituent areas
    mask = np.zeros(mmp.shape)
    for area in rois[roi]:
        mask[mmp == rois[roi][area]] = 1

    # Save mask as a GIfTI label file, using the MMP dseg map as template
    write_gifti(
        mask,
        join(tpl_dir, f'tpl-fsaverage6_hemi-{hemi}_desc-{roi}_mask.label.gii'),
        join(tpl_dir, f'tpl-fsaverage6_hemi-{hemi}_desc-MMP_dseg.label.gii'))

    # Keep a boolean copy for building the combined parcellation below
    masks[roi] = mask.astype(bool)
    # np.sum of a float mask — prints as a float (e.g. "123.0 vertices")
    n_voxels = np.sum(mask)
    np.save(join(tpl_dir, f'tpl-fsaverage6_hemi-{hemi}_desc-{roi}_mask.npy'),
            mask)
    print(f"Created {hemi} {roi} mask containing " f"{n_voxels} vertices")

# Create single parcellation map
# Integer labels follow dict insertion order: first ROI -> 1, second -> 2, ...
mask_map = np.zeros(mmp.shape)
for i, mask_name in enumerate(masks):
    mask_map[masks[mask_name]] = i + 1

write_gifti(
    mask_map,
Exemplo n.º 2
0
                # Compute ISCs
                print(f"Started ISC analysis for {subtask} ({hemi})")
                # presumably isc() returns one leave-one-out map per
                # subject/run in input order — TODO confirm against isc docs
                iscs = isc(data)
                print(f"Finished ISC analysis for {subtask} ({hemi})")

                # Split ISCs into subject-/run-specific GIfTI files
                # Assumes subject_list, run_list, and iscs are parallel
                # (same length, same ordering) — verify upstream construction
                assert len(subject_list) == len(run_list) == len(iscs)
                for s, fn, r in zip(subject_list, run_list, iscs):
                    # Derive output name from the clean-data filename:
                    # swap the desc-clean suffix for _isc and task for subtask
                    isc_fn = join(
                        afni_dir, s, 'func',
                        fn.replace('_desc-clean.func.gii',
                                   '_isc.gii').replace(f'task-{task}',
                                                       f'task-{subtask}'))
                    # Use the subject's original functional file as the
                    # GIfTI template for metadata
                    template_fn = join(afni_dir, s, 'func', fn)
                    write_gifti(r, isc_fn, template_fn)
                    print(f"Saved {subtask} {s} ({hemi}) ISC")


# Mean of correlations computed in Fisher z space (NaN-tolerant)
def fisher_mean(correlation, axis=None):
    """Average correlation values through the Fisher z transform.

    Correlations are mapped to z space with arctanh, averaged while
    ignoring NaNs, and mapped back with tanh.
    """
    z_values = np.arctanh(correlation)
    z_mean = np.nanmean(z_values, axis=axis)
    return np.tanh(z_mean)


# Compute mean ISC maps per task
for hemi in ['L', 'R']:

    global_isc = []
    for task in task_meta:

        # Skip 'schema' task for simplicity
Exemplo n.º 3
0

# Compute mean across all input images
# NOTE(review): relies on the shell (shell=True) to expand the sub-*/task-*
# wildcards in input_fns before 3dMean sees them
chdir(afni_dir)
for hemi in ['L', 'R']:
    # Wildcard pattern matching every subject/task tSNR map for this hemisphere
    input_fns = join(afni_dir, 'sub-*', 'func',
                     f'sub-*_task-*_space-{space}_'
                     f'hemi-{hemi}_desc-tsnr.func.gii')
    # Group-level output filename
    mean_fn = join(afni_dir, f'group_space-{space}_hemi-{hemi}_'
                             'desc-mean_tsnr.gii')
    # AFNI's 3dMean averages all matched maps element-wise
    run(f'3dMean -verbose -prefix {mean_fn} {input_fns}', shell=True)
    print(f"Finished computing mean tSNR for {hemi} hemisphere")
    
    
# Compute median across all input images
for hemi in ['L', 'R']:
    # All subject/task tSNR maps for this hemisphere
    input_fns = glob(join(afni_dir, 'sub-*', 'func',
                          f'sub-*_task-*_space-{space}_'
                          f'hemi-{hemi}_desc-tsnr.func.gii'))
    # Stack maps into one (n_maps, n_vertices) array; comprehension
    # replaces the manual append loop
    tsnr_stack = np.vstack([read_gifti(input_fn) for input_fn in input_fns])
    print(f"Loaded all {tsnr_stack.shape[0]} tSNR maps")

    # Median across maps is more robust to outlier subjects than the
    # 3dMean average computed above
    tsnr_median = np.median(tsnr_stack, axis=0)
    median_fn = join(afni_dir, f'group_space-{space}_hemi-{hemi}_'
                               'desc-median_tsnr.gii')

    # Use the first input file as the GIfTI template for metadata
    write_gifti(tsnr_median, median_fn, input_fns[0])
    print(f"Finished computing median tSNR for {hemi} hemisphere")
Exemplo n.º 4
0
# Pull in any subject's FreeSurfer parcellation
# All subjects in fsaverage6 space have the same medial NaNs
subject = 'sub-001'
task = 'tunnel'

# Check against known number of medial wall vertices
# (expected per-hemisphere counts for this template space)
n_medial = {'L': 3486, 'R': 3491}

for hemi in ['L', 'R']:
    # fMRIPrep functional output for this subject/task/hemisphere
    fs6_fn = join(deriv_dir, 'fmriprep', subject, 'func',
                  f'{subject}_task-{task}_space-{space}_hemi-{hemi}.func.gii')
    # presumably read_gifti returns (timepoints, vertices) and the first
    # row suffices because medial NaNs are constant over time — TODO confirm
    fs6 = read_gifti(fs6_fn)[0]

    # Get the medial wall NaNs output by fMRIPrep
    medial_vs = np.isnan(fs6)
    assert np.sum(medial_vs) == n_medial[hemi]
    print(f"Excluding {np.sum(medial_vs)} {hemi} medial wall vertices")

    # Negate to get a cortex-only mask (1.0 = cortex, 0.0 = medial wall)
    cortical_vs = (~medial_vs).astype(float)

    mask_fn = join(mask_dir, f'tpl-{space}_hemi-{hemi}_desc-cortex_mask.gii')
    write_gifti(cortical_vs, mask_fn, fs6_fn)

    # Make a 1D file for SUMA's SurfSmooth -c_mask
    # (one 0/1 value per line; note no trailing newline)
    mask_1D = mask_fn.replace('.gii', '.1D')
    with open(mask_1D, 'w') as f:
        f.write('\n'.join(cortical_vs.astype(int).astype(str).tolist()))
    print(f"Created {hemi} {space} cortex mask")
Exemplo n.º 5
0
# Representative MMP parcel value per ROI (e.g. for plotting colors).
# NOTE(review): 'PMC' reuses rois['PCC']['POS2'] instead of a 'PMC' entry —
# may be intentional (shared color for PCC/PMC) but looks like a possible
# copy-paste slip; confirm against the ROI definitions
roi_colors = {
    'EAC': rois['EAC']['A1'],
    'AAC': rois['AAC']['STSdp'],
    'PCC': rois['PCC']['POS2'],
    'PMC': rois['PCC']['POS2'],
    'TPOJ': rois['TPOJ']['TPOJ1']
}

# Create separate masks
masks = {}
for roi in rois:
    # Binary surface mask: 1 wherever the MMP parcellation matches any
    # of this ROI's constituent areas
    mask = np.zeros(mmp.shape)
    for area in rois[roi]:
        mask[mmp == rois[roi][area]] = 1

    # Save mask as GIfTI, using the full MMP map as template
    write_gifti(mask, f'data/MMP_{roi}_fsaverage6.{hemi}.gii',
                f'data/MMP_fsaverage6.{hemi}.gii')

    # Keep a boolean copy for building the combined parcellation below
    masks[roi] = mask.astype(bool)
    # np.sum of a float mask — prints as a float (e.g. "123.0 voxels")
    n_voxels = np.sum(mask)
    np.save(f'data/{roi}_mask_{hemi}.npy', mask)
    print(f"Created {hemi} {roi} mask containing " f"{n_voxels} voxels")

# Create single parcellation map
# Integer labels follow dict insertion order: first ROI -> 1, second -> 2, ...
mask_map = np.zeros(mmp.shape)
for i, mask_name in enumerate(masks):
    mask_map[masks[mask_name]] = i + 1

write_gifti(mask_map, f'data/MMP_ROIs_fsaverage6.{hemi}.gii',
            f'data/MMP_fsaverage6.{hemi}.gii')