Example #1
def nifti_to_dataset(nifti_file, attr_file=None, annot_file=None, subject_id=None, session_id=None):

    logger.info("Loading fmri dataset: {}".format(nifti_file))
    ds = fmri_dataset(samples=nifti_file)

    if attr_file is not None:
        logger.info("Loading attributes: {}".format(attr_file))
        attr = ColumnData(attr_file)
        valid = min(ds.nsamples, attr.nrows)
        valid = int(valid / 180) * 180  # FIXME: ...
        logger.info("valid samples: {}".format(valid))
        ds = ds[:valid, :]
        for k in attr.keys():
            ds.sa[k] = attr[k][:valid]

    if annot_file is not None:
        logger.info("Loading annotation: {}".format(annot_file))
        annot = nibabel.freesurfer.io.read_annot(annot_file)
        ds.fa["annotation"] = [annot[2][i] for i in annot[0]]  # FIXME: roi cannot be a fa

    if subject_id is not None:
        ds.sa["subject_id"] = [subject_id] * ds.nsamples

    if session_id is not None:
        ds.sa["session_id"] = [session_id] * ds.nsamples

    return ds
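
A minimal usage sketch for the function above. The paths, IDs, and the h5save import are illustrative assumptions; the function itself relies on PyMVPA's fmri_dataset and ColumnData being importable.

from mvpa2.base.hdf5 import h5save  # assumes PyMVPA is installed

#hypothetical session layout; adjust paths to your data
ds = nifti_to_dataset('session1/mri/f.nii.gz',
                      attr_file='session1/attributes.txt',
                      subject_id='sub001',
                      session_id='session1')
h5save('vol.hdf5', ds)  # persist the dataset for later analysis
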
Example #2
 def tofile(self, filename):
     """Write data to a FSL EV3 file.
     """
     ColumnData.tofile(self, filename,
                       header=False,
                       header_order=['onsets', 'durations', 'intensities'],
                       sep=' ')
Example #3
 def tofile(self, filename):
     """Write motion parameters to file.
     """
     ColumnData.tofile(self, filename,
                       header=False,
                       header_order=McFlirtParams.header_def,
                       sep=' ')
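
Both tofile variants above come from PyMVPA's FSL helpers. A hedged round-trip sketch, assuming the usual mvpa2.misc.fsl import path; the file names are hypothetical.

from mvpa2.misc.fsl import FslEV3, McFlirtParams  # assumed import path

ev = FslEV3('design.txt')           # onsets/durations/intensities columns
ev.tofile('design_copy.txt')        # rewritten as space-separated EV3

mc = McFlirtParams('bold_mcf.par')  # six motion-parameter columns
mc.tofile('bold_mcf_copy.par')
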
Example #6
def nifti_to_dataset(nifti_file,
                     attr_file=None,
                     annot_file=None,
                     subject_id=None,
                     session_id=None):

    logger.info('Loading fmri dataset: {}'.format(nifti_file))
    ds = fmri_dataset(samples=nifti_file)

    if attr_file is not None:
        logger.info('Loading attributes: {}'.format(attr_file))
        attr = ColumnData(attr_file)
        valid = min(ds.nsamples, attr.nrows)
        valid = int(valid / 180) * 180  #FIXME: ...
        logger.info('valid samples: {}'.format(valid))
        ds = ds[:valid, :]
        for k in attr.keys():
            ds.sa[k] = attr[k][:valid]

    if annot_file is not None:
        logger.info('Loading annotation: {}'.format(annot_file))
        annot = nibabel.freesurfer.io.read_annot(annot_file)
        ds.fa['annotation'] = [annot[2][i]
                               for i in annot[0]]  #FIXME: roi cannot be a fa

    if subject_id is not None:
        ds.sa['subject_id'] = [subject_id] * ds.nsamples

    if session_id is not None:
        ds.sa['session_id'] = [session_id] * ds.nsamples

    return ds
Example #7
    def __init__(self, source):
        """Initialize McFlirtParams

        Parameters
        ----------
        source : str
          Filename of a parameter file.
        """
        ColumnData.__init__(self, source, header=McFlirtParams.header_def, sep=None, dtype=float)
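
A short sketch of inspecting mcflirt output with this class; the file name is hypothetical, and the column names are whatever McFlirtParams.header_def defines.

params = McFlirtParams('bold_mcf.par')  # hypothetical mcflirt output
for name in params.keys():              # one entry per header_def column
    column = params[name]               # each column is a plain list of floats
    print(name, min(column), max(column))
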
Example #8
    def __init__(self, source):
        """Read and write FSL EV3 files.

        Parameters
        ----------
        source : str
          Filename of an EV3 file
        """
        # init data from known format
        ColumnData.__init__(self, source, header=["onsets", "durations", "intensities"], sep=None, dtype=float)
Example #9
    def __init__(self, source):
        """Initialize McFlirtParams

        Parameters
        ----------
        source : str
          Filename of a parameter file.
        """
        ColumnData.__init__(self, source,
                            header=McFlirtParams.header_def,
                            sep=None, dtype=float)
Example #10
def create_within_subject_workflow(name, work_dir, sessions_file,
                                   session_template, scan_list, fs_dir):

    #initialize workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = work_dir

    sessions_info = ColumnData(sessions_file, dtype=str)
    subject_ids = set(sessions_info['subject_id'])
    session_map = [(sid,
                    [s for i, s in zip(*sessions_info.values()) if i == sid])
                   for sid in subject_ids]

    ##for each subject
    subjects = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                       name='subjects')
    subjects.iterables = [('subject_id', subject_ids)]

    ##for each session
    sessions = pe.Node(
        interface=util.IdentityInterface(fields=['subject_id', 'session_dir']),
        name='sessions')
    sessions.itersource = ('subjects', 'subject_id')
    sessions.iterables = [('session_dir', dict(session_map))]
    workflow.connect(subjects, 'subject_id', sessions, 'subject_id')

    #get session directory
    get_session_dir = pe.Node(interface=nio.SelectFiles(session_template),
                              name='get_session_dir')
    workflow.connect(sessions, 'session_dir', get_session_dir, 'session_dir')

    #save outputs
    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.parameterization = False
    workflow.connect(get_session_dir, 'session_dir', datasink,
                     'base_directory')

    template = {'functional': 'mri/f.nii.gz'}
    get_files = pe.Node(nio.SelectFiles(template), name='get_files')
    workflow.connect(get_session_dir, 'session_dir', get_files,
                     'base_directory')

    join_sessions = pe.JoinNode(
        interface=util.IdentityInterface(fields=['functionals']),
        name='join_sessions',
        joinsource='sessions')
    workflow.connect(get_files, 'functional', join_sessions, 'functionals')

    within_subject_align = create_between_run_align_workflow(
        name='within_subject_align')
    workflow.connect(join_sessions, 'functionals', within_subject_align,
                     'inputs.in_files')

    return workflow
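
A hedged sketch of building and running this workflow; every path below is illustrative, and the sessions file is assumed to hold exactly two columns (subject_id plus a session identifier) to match the zip(*sessions_info.values()) unpacking above.

wf = create_within_subject_workflow(
    name='within_subject',
    work_dir='/scratch/work',        # hypothetical
    sessions_file='sessions.txt',    # hypothetical
    session_template={'session_dir': '{session_dir}'},
    scan_list=[],                    # accepted but unused by this function
    fs_dir='/data/freesurfer')       # likewise unused here
wf.run()
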
Example #11
    def __init__(self, source):
        """Read and write BrainVoyager RTC files.

        Parameters
        ----------
        source : str
          Filename of an RTC file
        """
        # init data from known format
        ColumnData.__init__(self, source, header=True,
                            sep=None, headersep='"', dtype=float, skiplines=5)
Example #12
    def __init__(self, source):
        """Read and write FSL EV3 files.

        Parameters
        ----------
        source : str
          Filename of an EV3 file
        """
        # init data from known format
        ColumnData.__init__(self, source,
                            header=['onsets', 'durations', 'intensities'],
                            sep=None, dtype=float)
Example #13
def create_within_subject_workflow(name,
                                   work_dir,
                                   sessions_file,
                                   session_template,
                                   scan_list,
                                   fs_dir):
    
    #initialize workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = work_dir

    sessions_info = ColumnData(sessions_file, dtype=str)
    subject_ids = set(sessions_info['subject_id'])
    session_map = [(sid,[s for i,s in zip(*sessions_info.values()) if i == sid])
                   for sid in subject_ids]

    ##for each subject
    subjects = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name='subjects')
    subjects.iterables = [('subject_id', subject_ids)]

    ##for each session
    sessions = pe.Node(interface=util.IdentityInterface(fields=['subject_id','session_dir']), name='sessions')
    sessions.itersource = ('subjects','subject_id')
    sessions.iterables = [('session_dir', dict(session_map))]
    workflow.connect(subjects,'subject_id',sessions,'subject_id')

    #get session directory
    get_session_dir = pe.Node(interface=nio.SelectFiles(session_template), name='get_session_dir')
    workflow.connect(sessions,'session_dir',get_session_dir,'session_dir')

    #save outputs
    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.parameterization = False
    workflow.connect(get_session_dir,'session_dir',datasink,'base_directory')

    template = {'functional':'mri/f.nii.gz'}
    get_files = pe.Node(nio.SelectFiles(template), name='get_files')
    workflow.connect(get_session_dir,'session_dir',get_files,'base_directory')

    join_sessions = pe.JoinNode(interface=util.IdentityInterface(fields=['functionals']),
                                name='join_sessions',
                                joinsource='sessions')
    workflow.connect(get_files,'functional',join_sessions,'functionals')

    within_subject_align = create_between_run_align_workflow(name='within_subject_align')
    workflow.connect(join_sessions,'functionals',within_subject_align,'inputs.in_files')
    
    return workflow
Example #14
    def __init__(self, source):
        """Read and write BrainVoyager RTC files.

        Parameters
        ----------
        source : str
          Filename of an RTC file
        """
        # init data from known format
        ColumnData.__init__(self,
                            source,
                            header=True,
                            sep=None,
                            headersep='"',
                            dtype=float,
                            skiplines=5)
Example #15
def create_ml_preprocess_workflow(
        name,
        project_dir,
        work_dir,
        sessions_file,
        session_template,
        fs_dir,
        annot_template='{subject_id}/label/{hemi}.aparc.a2009s.annot',
        fwhm_vals=[2],
        ico_order_vals=[4],
        do_save_vol_ds=False,
        do_save_smooth_vol_ds=False,
        do_save_surface_smooth_vol_ds=False,
        do_save_surface_ds=False,
        do_save_smooth_surface_ds=False,
        do_save_sphere_nifti=False,
        do_save_sphere_ds=True,
        do_save_join_sessions_ds=True,
        do_save_join_subjects_ds=True):

    #initialize workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = work_dir

    sessions_info = ColumnData(sessions_file, dtype=str)
    subject_ids = set(sessions_info['subject_id'])
    session_map = [
        (sid, [s for i, s, r in zip(*sessions_info.values()) if i == sid])
        for sid in subject_ids
    ]

    ##for each subject
    subjects = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                       name='subjects')
    subjects.iterables = [('subject_id', subject_ids)]

    ##for each session
    sessions = pe.Node(
        interface=util.IdentityInterface(fields=['subject_id', 'session_dir']),
        name='sessions')
    sessions.itersource = ('subjects', 'subject_id')
    sessions.iterables = [('session_dir', dict(session_map))]
    workflow.connect(subjects, 'subject_id', sessions, 'subject_id')

    #get session directory
    get_session_dir = pe.Node(interface=nio.SelectFiles(session_template),
                              name='get_session_dir')
    workflow.connect(sessions, 'session_dir', get_session_dir, 'session_dir')

    #save outputs
    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.parameterization = False
    workflow.connect(get_session_dir, 'session_dir', datasink,
                     'base_directory')

    template = {
        'nifti_file': 'mri/f.nii.gz',
        'attributes_file': 'attributes.txt',
        'reg_file': 'mri/transforms/functional_to_anatomy.dat'
    }
    get_files = pe.Node(nio.SelectFiles(template), name='get_files')
    workflow.connect(get_session_dir, 'session_dir', get_files,
                     'base_directory')

    vol_to_ds = pe.Node(nmutil.NiftiToDataset(), name='vol_to_ds')
    vol_to_ds.inputs.ds_file = 'vol.hdf5'

    workflow.connect(get_files, 'nifti_file', vol_to_ds, 'nifti_file')
    workflow.connect(get_files, 'attributes_file', vol_to_ds,
                     'attributes_file')
    workflow.connect(subjects, 'subject_id', vol_to_ds, 'subject_id')
    workflow.connect(sessions, 'session_dir', vol_to_ds, 'session_id')

    if do_save_vol_ds:
        workflow.connect(vol_to_ds, 'ds_file', datasink, 'ml.@vol')

    fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']), name='fwhm')
    fwhm.iterables = [('fwhm', fwhm_vals)]

    if do_save_smooth_vol_ds:
        smooth_vol = pe.Node(interface=fs.MRIConvert(), name='smooth_vol')
        workflow.connect(get_files, 'nifti_file', smooth_vol, 'in_file')
        workflow.connect(fwhm, 'fwhm', smooth_vol, 'fwhm')

        smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                   name='smooth_vol_to_ds')
        smooth_vol_to_ds.inputs.ds_file = 'smooth_vol.hdf5'

        workflow.connect(smooth_vol, 'out_file', smooth_vol_to_ds,
                         'nifti_file')
        workflow.connect(get_files, 'attributes_file', smooth_vol_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', smooth_vol_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', smooth_vol_to_ds,
                         'session_id')

        workflow.connect(smooth_vol_to_ds, 'ds_file', datasink,
                         'ml.@smooth_vol')

    if do_save_surface_smooth_vol_ds:
        surface_smooth_vol = pe.Node(interface=fs.Smooth(),
                                     name='surface_smooth_vol')
        workflow.connect(get_files, 'reg_file', surface_smooth_vol, 'reg_file')
        workflow.connect(get_files, 'nifti_file', surface_smooth_vol,
                         'in_file')
        workflow.connect(fwhm, 'fwhm', surface_smooth_vol, 'surface_fwhm')

        surface_smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                           name='surface_smooth_vol_to_ds')
        surface_smooth_vol_to_ds.inputs.ds_file = 'surface_smooth_vol.hdf5'

        workflow.connect(surface_smooth_vol, 'out_file',
                         surface_smooth_vol_to_ds, 'nifti_file')
        workflow.connect(get_files, 'attributes_file',
                         surface_smooth_vol_to_ds, 'attributes_file')
        workflow.connect(subjects, 'subject_id', surface_smooth_vol_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', surface_smooth_vol_to_ds,
                         'session_id')

        workflow.connect(surface_smooth_vol_to_ds, 'ds_file', datasink,
                         'ml.@surface_smooth_vol')

    hemi = pe.Node(util.IdentityInterface(fields=['hemi']), name='hemi')
    hemi.iterables = [('hemi', ['lh', 'rh'])]

    to_surface = pe.Node(fs.SampleToSurface(), name='to_surface')
    to_surface.inputs.sampling_method = 'average'
    to_surface.inputs.sampling_range = (0., 1., 0.1)
    to_surface.inputs.sampling_units = 'frac'
    to_surface.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', to_surface, 'hemi')
    workflow.connect(get_files, 'nifti_file', to_surface, 'source_file')
    workflow.connect(get_files, 'reg_file', to_surface, 'reg_file')

    if do_save_surface_ds:
        surface_to_ds = pe.Node(nmutil.NiftiToDataset(), name='surface_to_ds')
        workflow.connect(to_surface, 'out_file', surface_to_ds, 'nifti_file')
        workflow.connect(get_files, 'attributes_file', surface_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', surface_to_ds, 'subject_id')
        workflow.connect(sessions, 'session_dir', surface_to_ds, 'session_id')

        join_surfaces = pe.JoinNode(nmutil.JoinDatasets(),
                                    name='join_surfaces',
                                    joinsource='hemi',
                                    joinfield='input_datasets')
        join_surfaces.inputs.joined_dataset = 'surface.hdf5'
        join_surfaces.inputs.join_hemispheres = True
        workflow.connect(surface_to_ds, 'ds_file', join_surfaces,
                         'input_datasets')

        workflow.connect(join_surfaces, 'joined_dataset', datasink,
                         'ml.@surface')

    smooth_surface = pe.Node(fs.SurfaceSmooth(), name='smooth_surface')
    smooth_surface.inputs.subjects_dir = fs_dir
    workflow.connect(to_surface, 'out_file', smooth_surface, 'in_file')
    workflow.connect(sessions, 'subject_id', smooth_surface, 'subject_id')
    workflow.connect(hemi, 'hemi', smooth_surface, 'hemi')
    workflow.connect(fwhm, 'fwhm', smooth_surface, 'fwhm')

    if do_save_smooth_surface_ds:
        smooth_surface_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                       name='smooth_surface_to_ds')
        workflow.connect(smooth_surface, 'out_file', smooth_surface_to_ds,
                         'nifti_file')
        workflow.connect(get_files, 'attributes_file', smooth_surface_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', smooth_surface_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', smooth_surface_to_ds,
                         'session_id')

        join_smooth_surfaces = pe.JoinNode(nmutil.JoinDatasets(),
                                           name='join_smooth_surfaces',
                                           joinsource='hemi',
                                           joinfield='input_datasets')
        join_smooth_surfaces.inputs.joined_dataset = 'smooth_surface.hdf5'
        join_smooth_surfaces.inputs.join_hemispheres = True
        workflow.connect(smooth_surface_to_ds, 'ds_file', join_smooth_surfaces,
                         'input_datasets')

        workflow.connect(join_smooth_surfaces, 'joined_dataset', datasink,
                         'ml.@smooth_surface')

    ico_order = pe.Node(util.IdentityInterface(fields=['ico_order']),
                        name='ico_order')
    ico_order.iterables = [('ico_order', ico_order_vals)]

    to_sphere = pe.Node(fs.SurfaceTransform(), name='to_sphere')
    to_sphere.inputs.target_subject = 'ico'
    to_sphere.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', to_sphere, 'hemi')
    workflow.connect(smooth_surface, 'out_file', to_sphere, 'source_file')
    workflow.connect(subjects, 'subject_id', to_sphere, 'source_subject')
    workflow.connect(ico_order, 'ico_order', to_sphere, 'target_ico_order')

    if do_save_sphere_nifti:
        workflow.connect(to_sphere, 'out_file', datasink, 'surf.@sphere')

    template = {'annot_file': annot_template}
    get_annot_file = pe.Node(nio.SelectFiles(template), name='get_annot_file')
    get_annot_file.inputs.base_directory = fs_dir
    get_annot_file.inputs.subject_id = 'fsaverage'
    workflow.connect(hemi, 'hemi', get_annot_file, 'hemi')

    transform_annot = pe.Node(fs.SurfaceTransform(), name='transform_annot')
    transform_annot.inputs.source_subject = 'fsaverage'
    transform_annot.inputs.target_subject = 'ico'
    transform_annot.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', transform_annot, 'hemi')
    workflow.connect(get_annot_file, 'annot_file', transform_annot,
                     'source_annot_file')
    workflow.connect(ico_order, 'ico_order', transform_annot,
                     'target_ico_order')

    sphere_to_ds = pe.Node(nmutil.NiftiToDataset(), name='sphere_to_ds')
    workflow.connect(to_sphere, 'out_file', sphere_to_ds, 'nifti_file')
    workflow.connect(get_files, 'attributes_file', sphere_to_ds,
                     'attributes_file')
    workflow.connect(transform_annot, 'out_file', sphere_to_ds, 'annot_file')
    workflow.connect(subjects, 'subject_id', sphere_to_ds, 'subject_id')
    workflow.connect(sessions, 'session_dir', sphere_to_ds, 'session_id')

    join_hemispheres = pe.JoinNode(nmutil.JoinDatasets(),
                                   name='join_hemispheres',
                                   joinsource='hemi',
                                   joinfield='input_datasets')
    join_hemispheres.inputs.joined_dataset = 'sphere.hdf5'
    join_hemispheres.inputs.join_hemispheres = True

    workflow.connect(sphere_to_ds, 'ds_file', join_hemispheres,
                     'input_datasets')

    if do_save_sphere_ds:
        workflow.connect(join_hemispheres, 'joined_dataset', datasink,
                         'ml.@sphere')

    join_sessions = pe.JoinNode(nmutil.JoinDatasets(),
                                name='join_sessions',
                                joinsource='sessions',
                                joinfield='input_datasets')
    workflow.connect(join_hemispheres, 'joined_dataset', join_sessions,
                     'input_datasets')

    if do_save_join_sessions_ds:
        join_sessions_sink = pe.Node(nio.DataSink(), name='join_sessions_sink')
        join_sessions_sink.inputs.parameterization = False
        join_sessions_sink.inputs.base_directory = os.path.join(
            project_dir, 'ml')
        workflow.connect(subjects, 'subject_id', join_sessions_sink,
                         'container')
        workflow.connect(join_sessions, 'joined_dataset', join_sessions_sink,
                         '@join_sessions')

    join_subjects = pe.JoinNode(nmutil.JoinDatasets(),
                                name='join_subjects',
                                joinsource='subjects',
                                joinfield='input_datasets')
    workflow.connect(join_sessions, 'joined_dataset', join_subjects,
                     'input_datasets')

    if do_save_join_subjects_ds:
        join_subjects_sink = pe.Node(nio.DataSink(), name='join_subjects_sink')
        join_subjects_sink.inputs.parameterization = False
        join_subjects_sink.inputs.base_directory = os.path.join(
            project_dir, 'ml')
        workflow.connect(join_subjects, 'joined_dataset', join_subjects_sink,
                         '@join_subjects')

    return workflow
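
A hedged sketch of running the full preprocessing graph; paths are illustrative, and the sessions file is assumed to hold three columns (subject_id, a session identifier, and one more) to satisfy the zip(*sessions_info.values()) unpacking above.

wf = create_ml_preprocess_workflow(
    name='ml_preprocess',
    project_dir='/data/project',     # hypothetical
    work_dir='/scratch/work',        # hypothetical
    sessions_file='sessions.txt',    # hypothetical
    session_template={'session_dir': '{session_dir}'},
    fs_dir='/data/freesurfer')
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})  # nipype parallel run
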
Example #16
def create_ml_preprocess_workflow(name,
                                  project_dir,
                                  work_dir,
                                  sessions_file,
                                  session_template,
                                  fs_dir,
                                  fwhm_vals=[2],
                                  ico_order_vals=[4],
                                  do_save_vol_ds = False,
                                  do_save_smooth_vol_ds = False,
                                  do_save_surface_smooth_vol_ds = False,
                                  do_save_surface_ds = False,
                                  do_save_smooth_surface_ds = False,
                                  do_save_sphere_nifti = False,
                                  do_save_sphere_ds = True,
                                  do_save_join_sessions_ds = True,
                                  do_save_join_subjects_ds = True):

    #initialize workflow                                                                                   
    workflow = pe.Workflow(name=name)
    workflow.base_dir = work_dir

    sessions_info = ColumnData(sessions_file, dtype=str)
    subject_ids = set(sessions_info['subject_id'])
    session_map = [(sid,[s for i,s,r in zip(*sessions_info.values()) if i == sid])
                   for sid in subject_ids]

    ##for each subject                                                                                         
    subjects = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name='subjects')
    subjects.iterables = [('subject_id', subject_ids)]

    ##for each session                                                                         
    sessions = pe.Node(interface=util.IdentityInterface(fields=['subject_id','session_dir']), name='sessions')
    sessions.itersource = ('subjects','subject_id')
    sessions.iterables = [('session_dir', dict(session_map))]
    workflow.connect(subjects,'subject_id',sessions,'subject_id')

    #get session directory                                                                                                        
    get_session_dir = pe.Node(interface=nio.SelectFiles(session_template), name='get_session_dir')
    workflow.connect(sessions,'session_dir',get_session_dir,'session_dir')

    #save outputs
    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.parameterization = False
    workflow.connect(get_session_dir,'session_dir',datasink,'base_directory')

    template = {'nifti_file':'mri/f.nii.gz',
                'attributes_file':'attributes.txt',
                'reg_file':'mri/transforms/functional_to_anatomy.dat'}
    get_files = pe.Node(nio.SelectFiles(template), name='get_files')
    workflow.connect(get_session_dir,'session_dir',get_files,'base_directory')

    vol_to_ds = pe.Node(nmutil.NiftiToDataset(), name='vol_to_ds')
    vol_to_ds.inputs.ds_file = 'vol.hdf5'

    workflow.connect(get_files,'nifti_file',vol_to_ds,'nifti_file')
    workflow.connect(get_files,'attributes_file',vol_to_ds,'attributes_file')
    workflow.connect(subjects,'subject_id',vol_to_ds,'subject_id')
    workflow.connect(sessions,'session_dir',vol_to_ds,'session_id')

    if do_save_vol_ds:
        workflow.connect(vol_to_ds,'ds_file',datasink,'ml.@vol')

    fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']), name='fwhm')
    fwhm.iterables = [('fwhm',fwhm_vals)]

    if do_save_smooth_vol_ds:
        smooth_vol = pe.Node(interface=fs.MRIConvert(), name='smooth_vol')
        workflow.connect(get_files,'nifti_file',smooth_vol,'in_file')
        workflow.connect(fwhm,'fwhm',smooth_vol,'fwhm')
    
        smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(), name='smooth_vol_to_ds')
        smooth_vol_to_ds.inputs.ds_file = 'smooth_vol.hdf5'
    
        workflow.connect(smooth_vol,'out_file',smooth_vol_to_ds,'nifti_file')
        workflow.connect(get_files,'attributes_file',smooth_vol_to_ds,'attributes_file')
        workflow.connect(subjects,'subject_id',smooth_vol_to_ds,'subject_id')
        workflow.connect(sessions,'session_dir',smooth_vol_to_ds,'session_id')
    
        workflow.connect(smooth_vol_to_ds,'ds_file',datasink,'ml.@smooth_vol')

    if do_save_surface_smooth_vol_ds:
        surface_smooth_vol = pe.Node(interface=fs.Smooth(), name='surface_smooth_vol')
        workflow.connect(get_files,'reg_file',surface_smooth_vol,'reg_file')
        workflow.connect(get_files,'nifti_file',surface_smooth_vol,'in_file')
        workflow.connect(fwhm,'fwhm',surface_smooth_vol,'surface_fwhm')
    
        surface_smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(), name='surface_smooth_vol_to_ds')
        surface_smooth_vol_to_ds.inputs.ds_file = 'surface_smooth_vol.hdf5'
    
        workflow.connect(surface_smooth_vol,'out_file',surface_smooth_vol_to_ds,'nifti_file')
        workflow.connect(get_files,'attributes_file',surface_smooth_vol_to_ds,'attributes_file')
        workflow.connect(subjects,'subject_id',surface_smooth_vol_to_ds,'subject_id')
        workflow.connect(sessions,'session_dir',surface_smooth_vol_to_ds,'session_id')
    
        workflow.connect(surface_smooth_vol_to_ds,'ds_file',datasink,'ml.@surface_smooth_vol')

    hemi = pe.Node(util.IdentityInterface(fields=['hemi']), name='hemi')
    hemi.iterables = [('hemi',['lh','rh'])]

    to_surface = pe.Node(fs.SampleToSurface(), name='to_surface')
    to_surface.inputs.sampling_method = 'average'
    to_surface.inputs.sampling_range = (0., 1., 0.1)
    to_surface.inputs.sampling_units = 'frac'
    workflow.connect(hemi,'hemi',to_surface,'hemi')
    workflow.connect(get_files,'nifti_file',to_surface,'source_file')
    workflow.connect(get_files,'reg_file',to_surface,'reg_file')

    if do_save_surface_ds:    
        surface_to_ds = pe.Node(nmutil.NiftiToDataset(), name='surface_to_ds')
        workflow.connect(to_surface,'out_file',surface_to_ds,'nifti_file')
        workflow.connect(get_files,'attributes_file',surface_to_ds,'attributes_file')
        workflow.connect(subjects,'subject_id',surface_to_ds,'subject_id')
        workflow.connect(sessions,'session_dir',surface_to_ds,'session_id')

        join_surfaces = pe.JoinNode(nmutil.JoinDatasets(), 
                                    name='join_surfaces',
                                    joinsource='hemi',
                                    joinfield='input_datasets')
        join_surfaces.inputs.joined_dataset = 'surface.hdf5'
        join_surfaces.inputs.join_hemispheres = True
        workflow.connect(surface_to_ds,'ds_file',join_surfaces,'input_datasets')
    
        workflow.connect(join_surfaces,'joined_dataset',datasink,'ml.@surface')

    smooth_surface = pe.Node(fs.SurfaceSmooth(), name='smooth_surface')
    workflow.connect(to_surface,'out_file',smooth_surface,'in_file')
    workflow.connect(sessions,'subject_id',smooth_surface,'subject_id')
    workflow.connect(hemi,'hemi',smooth_surface,'hemi')
    workflow.connect(fwhm,'fwhm',smooth_surface,'fwhm')

    if do_save_smooth_surface_ds:        
        smooth_surface_to_ds = pe.Node(nmutil.NiftiToDataset(), name='smooth_surface_to_ds')
        workflow.connect(smooth_surface,'out_file',smooth_surface_to_ds,'nifti_file')
        workflow.connect(get_files,'attributes_file',smooth_surface_to_ds,'attributes_file')
        workflow.connect(subjects,'subject_id',smooth_surface_to_ds,'subject_id')
        workflow.connect(sessions,'session_dir',smooth_surface_to_ds,'session_id')

        join_smooth_surfaces = pe.JoinNode(nmutil.JoinDatasets(), 
                                           name='join_smooth_surfaces',
                                           joinsource='hemi',
                                           joinfield='input_datasets')
        join_smooth_surfaces.inputs.joined_dataset = 'smooth_surface.hdf5'
        join_smooth_surfaces.inputs.join_hemispheres = True
        workflow.connect(smooth_surface_to_ds,'ds_file',join_smooth_surfaces,'input_datasets')
    
        workflow.connect(join_smooth_surfaces,'joined_dataset',datasink,'ml.@smooth_surface')
    

    ico_order = pe.Node(util.IdentityInterface(fields=['ico_order']), name='ico_order')
    ico_order.iterables = [('ico_order',ico_order_vals)]

    to_sphere = pe.Node(fs.SurfaceTransform(), name='to_sphere')
    to_sphere.inputs.target_subject = 'ico'
    workflow.connect(hemi,'hemi',to_sphere,'hemi')
    workflow.connect(smooth_surface,'out_file',to_sphere,'source_file')
    workflow.connect(subjects,'subject_id',to_sphere,'source_subject')
    workflow.connect(ico_order,'ico_order',to_sphere,'target_ico_order')

    if do_save_sphere_nifti:
        workflow.connect(to_sphere,'out_file',datasink,'surf.@sphere')

    template = {'annot_file':'{subject_id}/label/{hemi}.aparc.a2009s.annot'}
    get_annot_file = pe.Node(nio.SelectFiles(template), name='get_annot_file')
    get_annot_file.inputs.base_directory = fs_dir
    get_annot_file.inputs.subject_id = 'fsaverage'
    workflow.connect(hemi,'hemi',get_annot_file,'hemi')

    transform_annot = pe.Node(fs.SurfaceTransform(), name='transform_annot')
    transform_annot.inputs.source_subject = 'fsaverage'
    transform_annot.inputs.target_subject = 'ico'
    workflow.connect(hemi,'hemi',transform_annot,'hemi')
    workflow.connect(get_annot_file,'annot_file',transform_annot,'source_annot_file')
    workflow.connect(ico_order,'ico_order',transform_annot,'target_ico_order')
    
    sphere_to_ds = pe.Node(nmutil.NiftiToDataset(), name='sphere_to_ds')
    workflow.connect(to_sphere,'out_file',sphere_to_ds,'nifti_file')
    workflow.connect(get_files,'attributes_file',sphere_to_ds,'attributes_file')
    workflow.connect(transform_annot,'out_file',sphere_to_ds,'annot_file')
    workflow.connect(subjects,'subject_id',sphere_to_ds,'subject_id')
    workflow.connect(sessions,'session_dir',sphere_to_ds,'session_id')

    join_hemispheres = pe.JoinNode(nmutil.JoinDatasets(), 
                                   name='join_hemispheres',
                                   joinsource='hemi',
                                   joinfield='input_datasets')
    join_hemispheres.inputs.joined_dataset = 'sphere.hdf5'
    join_hemispheres.inputs.join_hemispheres = True

    workflow.connect(sphere_to_ds,'ds_file',join_hemispheres,'input_datasets')

    if do_save_sphere_ds:
        workflow.connect(join_hemispheres,'joined_dataset',datasink,'ml.@sphere')

    join_sessions = pe.JoinNode(nmutil.JoinDatasets(), 
                                name='join_sessions',
                                joinsource='sessions',
                                joinfield='input_datasets')
    workflow.connect(join_hemispheres,'joined_dataset',join_sessions,'input_datasets')

    if do_save_join_sessions_ds:
        join_sessions_sink = pe.Node(nio.DataSink(), name='join_sessions_sink')
        join_sessions_sink.inputs.parameterization = False
        join_sessions_sink.inputs.base_directory = os.path.join(project_dir,'ml')
        workflow.connect(subjects,'subject_id',join_sessions_sink,'container')
        workflow.connect(join_sessions,'joined_dataset',join_sessions_sink,'@join_sessions')

    join_subjects = pe.JoinNode(nmutil.JoinDatasets(),
                                name='join_subjects',
                                joinsource='subjects',
                                joinfield='input_datasets')
    workflow.connect(join_sessions,'joined_dataset',join_subjects,'input_datasets')

    if do_save_join_subjects_ds:
        join_subjects_sink = pe.Node(nio.DataSink(), name='join_subjects_sink')
        join_subjects_sink.inputs.parameterization = False
        join_subjects_sink.inputs.base_directory = os.path.join(project_dir,'ml')
        workflow.connect(join_subjects,'joined_dataset',join_subjects_sink,'@join_subjects')

    return workflow
Example #17
 def tofile(self, filename):
     """Write data to a FSL EV3 file.
     """
     ColumnData.tofile(self, filename, header=False, header_order=["onsets", "durations", "intensities"], sep=" ")
Example #18
def create_preprocess_workflow(name,
                               work_dir,
                               sessions_file,
                               session_template,
                               scan_list,
                               fs_dir,
                               do_extract_inplane=True,
                               do_save_inplane=True,
                               do_align_to_anatomy=True,
                               do_align_qa=True,
                               do_save_align_qa=True,
                               do_save_strip=True,
                               do_save_align=True,
                               do_extract_functionals=True,
                               do_save_functionals=True,
                               do_within_run_align=True,
                               do_slice_timing_correction=True,
                               do_between_run_align=True,
                               do_merge_functionals=True,
                               do_within_subject_align=True,
                               do_save_merge=True):
    #initialize workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = work_dir

    ##for each session
    sessions_info = ColumnData(sessions_file, dtype=str)
    sessions = pe.Node(interface=util.IdentityInterface(
        fields=['session_dir', 'subject_id', 'ref_vol']),
                       name='sessions')
    sessions.iterables = sessions_info.items()
    sessions.synchronize = True

    #get session directory
    get_session_dir = pe.Node(interface=nio.SelectFiles(session_template),
                              name='get_session_dir')
    workflow.connect(sessions, 'session_dir', get_session_dir, 'session_dir')

    #save outputs
    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.parameterization = False
    workflow.connect(get_session_dir, 'session_dir', datasink,
                     'base_directory')

    #extract inplane
    if do_extract_inplane:
        extract_inplane = create_extract_inplane_workflow()
        workflow.connect(get_session_dir, 'session_dir', extract_inplane,
                         'inputs.session_dir')
        workflow.connect(sessions, 'ref_vol', extract_inplane,
                         'inputs.ref_vol')

        if do_save_inplane:
            workflow.connect(extract_inplane, 'outputs.out_file', datasink,
                             'mri.@inplane')

            #align inplanes to anatomy
            if do_align_to_anatomy:
                get_anatomy = pe.Node(interface=nio.FreeSurferSource(),
                                      name='get_anatomy')
                get_anatomy.inputs.subjects_dir = fs_dir
                workflow.connect(sessions, 'subject_id', get_anatomy,
                                 'subject_id')

                align_to_anatomy = create_align_to_anatomy_workflow()
                workflow.connect(extract_inplane, 'outputs.out_file',
                                 align_to_anatomy, 'inputs.inplane_file')
                workflow.connect(get_anatomy, 'brain', align_to_anatomy,
                                 'inputs.anatomy_file')

                if do_align_qa:
                    align_qa = pe.Node(interface=nmutil.AlignmentQA(),
                                       name='align_qa')
                    workflow.connect(get_anatomy, 'brain', align_qa,
                                     'target_file')
                    workflow.connect(align_to_anatomy, 'outputs.strip_file',
                                     align_qa, 'source_file')
                    workflow.connect(align_to_anatomy, 'outputs.xfm_file',
                                     align_qa, 'reg_file')

                    if do_save_align_qa:
                        workflow.connect(align_qa, 'out_file', datasink,
                                         'qa.inplane_to_anatomy')

                if do_save_strip:
                    workflow.connect(align_to_anatomy, 'outputs.strip_file',
                                     datasink, 'mri.@inplane.@strip')

                if do_save_align:
                    workflow.connect(align_to_anatomy, 'outputs.xfm_file',
                                     datasink,
                                     'mri.transforms.@inplane_to_anatomy')

    if do_extract_functionals:
        ##for each functional
        scans = pe.Node(interface=util.IdentityInterface(fields=['scan']),
                        name='scans')
        scans.iterables = ('scan', scan_list)

        #extract functionals
        extract_functional = create_extract_functional_workflow()
        workflow.connect(get_session_dir, 'session_dir', extract_functional,
                         'inputs.session_dir')
        workflow.connect(scans, 'scan', extract_functional, 'inputs.scan')
        last_node = extract_functional

        #simultaneous slice timing and motion correction
        if do_within_run_align:
            within_run_align = create_within_run_align_workflow(
                slice_timing_correction=do_slice_timing_correction)
            workflow.connect(last_node, 'outputs.out_file', within_run_align,
                             'inputs.in_file')
            last_node = within_run_align

        ##with all functionals
        join_functionals = pe.JoinNode(
            interface=util.IdentityInterface(fields=['functionals']),
            name='join_functionals',
            joinsource='scans')

        workflow.connect(last_node, 'outputs.out_file', join_functionals,
                         'functionals')

        #between run align
        if do_between_run_align:
            between_run_align = create_between_run_align_workflow()
            workflow.connect(join_functionals, 'functionals',
                             between_run_align, 'inputs.in_files')
            workflow.connect(sessions, 'ref_vol', between_run_align,
                             'inputs.ref_vol')

            workflow.connect(between_run_align, 'outputs.out_files', datasink,
                             'mri.@functionals')

            #merge functionals
            if do_merge_functionals:
                merge_functionals = pe.Node(interface=fsl.Merge(),
                                            name='merge_functionals')
                merge_functionals.inputs.dimension = 't'
                format_string = 'f'
                rename_merged = pe.Node(interface=util.Rename(format_string),
                                        name='rename_merged')
                rename_merged.inputs.keep_ext = True
                workflow.connect(between_run_align, 'outputs.out_files',
                                 merge_functionals, 'in_files')
                workflow.connect(merge_functionals, 'merged_file',
                                 rename_merged, 'in_file')

                #rename_merged only exists when do_merge_functionals is
                #set, so the save must stay inside that block
                if do_save_merge:
                    workflow.connect(rename_merged, 'out_file', datasink,
                                     'mri.@functionals.@merged')

    return workflow
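
A hedged invocation sketch; paths and scan names are illustrative. The sessions file must supply session_dir, subject_id, and ref_vol columns, since sessions.iterables is built directly from its items().

wf = create_preprocess_workflow(
    name='preprocess',
    work_dir='/scratch/work',          # hypothetical
    sessions_file='sessions.txt',      # hypothetical
    session_template={'session_dir': '{session_dir}'},
    scan_list=['scan1', 'scan2'],      # hypothetical scan names
    fs_dir='/data/freesurfer')
wf.write_graph(graph2use='flat')       # optional: render the node graph
wf.run()
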
Example #19
def create_preprocess_workflow(name,
                               work_dir,
                               sessions_file,
                               session_template,
                               scan_list,
                               fs_dir,
                               do_extract_inplane = True,
                               do_save_inplane = True,
                               do_align_to_anatomy = True,
                               do_align_qa = True,
                               do_save_align_qa = True,
                               do_save_strip = True,
                               do_save_align = True,
                               do_extract_functionals = True,
                               do_save_functionals = True,
                               do_within_run_align = True,
                               do_slice_timing_correction = True,
                               do_between_run_align = True,
                               do_merge_functionals = True,
                               do_within_subject_align = True,
                               do_save_merge = True):
    #initialize workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = work_dir

    ##for each session
    sessions_info = ColumnData(sessions_file, dtype=str)
    sessions = pe.Node(interface=util.IdentityInterface(fields=['session_dir','subject_id','ref_vol']), name='sessions')
    sessions.iterables = sessions_info.items()
    sessions.synchronize = True

    #get session directory
    get_session_dir = pe.Node(interface=nio.SelectFiles(session_template), name='get_session_dir')
    workflow.connect(sessions,'session_dir',get_session_dir,'session_dir')

    #save outputs
    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.parameterization = False
    workflow.connect(get_session_dir,'session_dir',datasink,'base_directory')

    #extract inplane
    if do_extract_inplane:
        extract_inplane = create_extract_inplane_workflow()
        workflow.connect(get_session_dir,'session_dir',extract_inplane,'inputs.session_dir')
        workflow.connect(sessions,'ref_vol',extract_inplane,'inputs.ref_vol')

        if do_save_inplane:
            workflow.connect(extract_inplane,'outputs.out_file',datasink,'mri.@inplane')

            #align inplanes to anatomy
            if do_align_to_anatomy:
                get_anatomy = pe.Node(interface=nio.FreeSurferSource(), name='get_anatomy')
                get_anatomy.inputs.subjects_dir = fs_dir
                workflow.connect(sessions,'subject_id',get_anatomy,'subject_id')
                
                align_to_anatomy = create_align_to_anatomy_workflow()
                workflow.connect(extract_inplane,'outputs.out_file',align_to_anatomy,'inputs.inplane_file')
                workflow.connect(get_anatomy,'brain',align_to_anatomy,'inputs.anatomy_file')

                if do_align_qa:
                    align_qa = pe.Node(interface=nmutil.AlignmentQA(), name='align_qa')
                    workflow.connect(get_anatomy,'brain',align_qa,'target_file')
                    workflow.connect(align_to_anatomy,'outputs.strip_file',align_qa,'source_file')
                    workflow.connect(align_to_anatomy,'outputs.xfm_file',align_qa,'reg_file')

                    if do_save_align_qa:
                        workflow.connect(align_qa,'out_file',datasink,'qa.inplane_to_anatomy')

                if do_save_strip:
                    workflow.connect(align_to_anatomy,'outputs.strip_file',datasink,'mri.@inplane.@strip')

                if do_save_align:
                    workflow.connect(align_to_anatomy,'outputs.xfm_file',datasink,'mri.transforms.@inplane_to_anatomy')

    if do_extract_functionals:
        ##for each functional
        scans = pe.Node(interface=util.IdentityInterface(fields=['scan']), name='scans')
        scans.iterables = ('scan', scan_list)

        #extract functionals
        extract_functional = create_extract_functional_workflow()
        workflow.connect(get_session_dir,'session_dir',extract_functional,'inputs.session_dir')
        workflow.connect(scans,'scan',extract_functional,'inputs.scan')
        last_node = extract_functional

        #simultaneous slice timing and motion correction
        if do_within_run_align:
            within_run_align = create_within_run_align_workflow(slice_timing_correction = do_slice_timing_correction)
            workflow.connect(last_node,'outputs.out_file',within_run_align,'inputs.in_file')
            last_node = within_run_align

        ##with all functionals
        join_functionals = pe.JoinNode(interface=util.IdentityInterface(fields=['functionals']),
                                       name='join_functionals',
                                       joinsource='scans')
        
        workflow.connect(last_node,'outputs.out_file',join_functionals,'functionals')

        #between run align
        if do_between_run_align:
            between_run_align = create_between_run_align_workflow()
            workflow.connect(join_functionals,'functionals',between_run_align,'inputs.in_files')
            workflow.connect(sessions,'ref_vol',between_run_align,'inputs.ref_vol')

            workflow.connect(between_run_align,'outputs.out_files',datasink,'mri.@functionals')

            #merge functionals
            if do_merge_functionals:
                merge_functionals = pe.Node(interface=fsl.Merge(), name='merge_functionals')
                merge_functionals.inputs.dimension = 't'
                format_string = 'f'
                rename_merged = pe.Node(interface=util.Rename(format_string), name='rename_merged')
                rename_merged.inputs.keep_ext = True
                workflow.connect(between_run_align,'outputs.out_files',merge_functionals,'in_files')
                workflow.connect(merge_functionals,'merged_file',rename_merged,'in_file')

    
                #rename_merged only exists when do_merge_functionals is set,
                #so the save must stay inside that block
                if do_save_merge:
                    workflow.connect(rename_merged,'out_file',datasink,'mri.@functionals.@merged')

    return workflow