def test_AddCSVRow_outputs():
    """Check the metadata declared on the AddCSVRow output traits.

    Nose-style generator test: yields one (assert_equal, actual, expected)
    triple per trait-metadata pair.
    """
    expected = dict(
        csv_file=dict(),
    )
    outputs = AddCSVRow.output_spec()
    for trait_name, metadata in expected.items():
        for meta_key, meta_value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[trait_name],
                                        meta_key), meta_value
def test_AddCSVRow_inputs():
    """Check the metadata declared on the AddCSVRow input traits.

    Nose-style generator test: yields one (assert_equal, actual, expected)
    triple per trait-metadata pair.
    """
    expected = dict(
        _outputs=dict(usedefault=True),
        ignore_exception=dict(nohash=True, usedefault=True),
        in_file=dict(mandatory=True),
    )
    inputs = AddCSVRow.input_spec()
    for trait_name, metadata in expected.items():
        for meta_key, meta_value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[trait_name],
                                        meta_key), meta_value
def test_AddCSVRow_inputs():
    """Verify AddCSVRow input-spec trait metadata (auto-generated test).

    NOTE(review): this redefines ``test_AddCSVRow_inputs`` — when both
    definitions live in one module, this one shadows the earlier copy.
    """
    input_map = dict(
        _outputs=dict(usedefault=True),
        ignore_exception=dict(nohash=True, usedefault=True),
        in_file=dict(mandatory=True),
    )
    spec = AddCSVRow.input_spec()
    for key, meta in input_map.items():
        # Hoist the trait lookup; the metadata checks all target one trait.
        trait = spec.traits()[key]
        for metakey, value in meta.items():
            yield assert_equal, getattr(trait, metakey), value
def hcp_workflow(name='Evaluation_HCP', settings=None, map_metric=False,
                 compute_fmb=False):
    """
    The regseg evaluation workflow for the human connectome project (HCP).

    Parameters
    ----------
    name : str
        Name of the top-level nipype workflow.
    settings : dict or None
        Configuration mapping; the workflow reads ``data_dir``,
        ``subject_id``, ``out_csv`` and ``nthreads`` from it. ``None``
        is treated as an empty dict (replaces the former mutable default
        argument ``settings={}``).
    map_metric : bool
        If True, additionally map registration energies via ``map_energy``.
    compute_fmb : bool
        If True, also run the fieldmap-based (FMB) correction branch.

    Returns
    -------
    nipype.pipeline.engine.Workflow
        The assembled evaluation workflow.
    """
    from nipype.pipeline import engine as pe
    from nipype.interfaces import utility as niu
    from nipype.algorithms.mesh import ComputeMeshWarp, WarpPoints
    from nipype.algorithms.misc import AddCSVRow
    from nipype.workflows.dmri.fsl.artifacts import sdc_fmb
    from .. import data
    from ..interfaces.utility import (ExportSlices, TileSlicesGrid,
                                      SlicesGridplot)
    from .registration import regseg_wf, sdc_t2b
    from .preprocess import preprocess
    from .fieldmap import process_vsm
    from .dti import mrtrix_dti
    import evaluation as ev

    # Guard against the shared-state pitfall of a mutable default argument.
    if settings is None:
        settings = {}

    wf = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['subject_id', 'data_dir']),
        name='inputnode')
    inputnode.inputs.data_dir = settings['data_dir']
    inputnode.iterables = [('subject_id', settings['subject_id'])]

    # Generate the distorted set, including surfaces
    pre = preprocess()
    rdti = mrtrix_dti('ReferenceDTI')
    wdti = mrtrix_dti('WarpedDTI')
    mdti = pe.Node(niu.Merge(2), name='MergeDTI')

    wf.connect([
        (inputnode, pre, [('subject_id', 'inputnode.subject_id'),
                          ('data_dir', 'inputnode.data_dir')]),
        (pre, rdti, [('outputnode.dwi', 'inputnode.in_dwi'),
                     ('outputnode.dwi_mask', 'inputnode.in_mask'),
                     ('outputnode.bvec', 'inputnode.in_bvec'),
                     ('outputnode.bval', 'inputnode.in_bval')]),
        (pre, wdti, [('outputnode.warped_dwi', 'inputnode.in_dwi'),
                     ('outputnode.warped_msk', 'inputnode.in_mask'),
                     ('outputnode.bvec', 'inputnode.in_bvec'),
                     ('outputnode.bval', 'inputnode.in_bval')]),
        (wdti, mdti, [('outputnode.fa', 'in1'), ('outputnode.md', 'in2')]),
    ])

    # REGSEG branch: the registration method under evaluation.
    regseg = regseg_wf(usemask=True)
    regseg.inputs.inputnode.options = data.get('regseg_hcp')
    exprs = pe.Node(ExportSlices(slices=[38, 48, 57, 67, 76, 86],
                                 axis=['axial', 'sagittal']),
                    name='ExportREGSEG')
    gridrs = pe.Node(SlicesGridplot(label=['regseg', 'regseg'],
                                    slices=[38, 48, 57, 67, 76, 86],
                                    view=['axial', 'sagittal']),
                     name='GridPlotREGSEG')
    meshrs = pe.MapNode(ComputeMeshWarp(),
                        iterfield=['surface1', 'surface2'],
                        name='REGSEGSurfDistance')
    csvrs = pe.Node(AddCSVRow(in_file=settings['out_csv']),
                    name="REGSEGAddRow")
    csvrs.inputs.method = 'REGSEG'

    wf.connect([
        (mdti, regseg, [('out', 'inputnode.in_fixed')]),
        (pre, regseg, [('outputnode.surf', 'inputnode.in_surf'),
                       ('outputnode.warped_msk', 'inputnode.in_mask')]),
        (pre, exprs, [('outputnode.warped_surf', 'sgreen')]),
        (regseg, exprs, [('outputnode.out_surf', 'syellow')]),
        (wdti, exprs, [('outputnode.fa', 'reference')]),
        (exprs, gridrs, [('out_files', 'in_files')]),
        (pre, meshrs, [('outputnode.warped_surf', 'surface1')]),
        (regseg, meshrs, [('outputnode.out_surf', 'surface2')]),
        (inputnode, csvrs, [('subject_id', 'subject_id')]),
        (meshrs, csvrs, [('distance', 'surf_dist')]),
    ])

    if compute_fmb:
        # Fieldmap-based (FMB) susceptibility-distortion correction branch.
        cmethod0 = sdc_fmb()
        selbmap = pe.Node(niu.Split(splits=[1, 1], squeeze=True),
                          name='SelectBmap')
        dfm = process_vsm()
        dfm.inputs.inputnode.scaling = 1.0
        dfm.inputs.inputnode.enc_dir = 'y-'
        wrpsurf = pe.MapNode(WarpPoints(), iterfield=['points'],
                             name='UnwarpSurfs')
        export0 = pe.Node(ExportSlices(slices=[38, 48, 57, 67, 76, 86],
                                       axis=['axial', 'sagittal']),
                          name='ExportFMB')
        mesh0 = pe.MapNode(ComputeMeshWarp(),
                           iterfield=['surface1', 'surface2'],
                           name='FMBSurfDistance')
        grid0 = pe.Node(SlicesGridplot(label=['FMB'] * 2,
                                       slices=[38, 48, 57, 67, 76, 86],
                                       view=['axial', 'sagittal']),
                        name='GridPlotFMB')
        csv0 = pe.Node(AddCSVRow(in_file=settings['out_csv']),
                       name="FMBAddRow")
        csv0.inputs.method = 'FMB'

        wf.connect([
            (pre, cmethod0, [('outputnode.warped_dwi', 'inputnode.in_file'),
                             ('outputnode.warped_msk', 'inputnode.in_mask'),
                             ('outputnode.bval', 'inputnode.in_bval'),
                             ('outputnode.mr_param', 'inputnode.settings')]),
            (pre, selbmap, [('outputnode.bmap_wrapped', 'inlist')]),
            (selbmap, cmethod0, [('out1', 'inputnode.bmap_mag'),
                                 ('out2', 'inputnode.bmap_pha')]),
            (cmethod0, dfm, [('outputnode.out_vsm', 'inputnode.vsm')]),
            (pre, dfm, [('outputnode.warped_msk', 'inputnode.reference')]),
            (dfm, wrpsurf, [('outputnode.dfm', 'warp')]),
            # BUGFIX: a comma was missing after this tuple, so the previous
            # code *called* it with the next tuple as arguments, raising
            # "TypeError: 'tuple' object is not callable" at run time.
            (pre, wrpsurf, [('outputnode.surf', 'points')]),
            (wrpsurf, export0, [('out_points', 'syellow')]),
            (pre, export0, [('outputnode.warped_surf', 'sgreen')]),
            (wdti, export0, [('outputnode.fa', 'reference')]),
            (export0, grid0, [('out_files', 'in_files')]),
            (pre, mesh0, [('outputnode.warped_surf', 'surface1')]),
            (wrpsurf, mesh0, [('out_points', 'surface2')]),
            (inputnode, csv0, [('subject_id', 'subject_id')]),
            (mesh0, csv0, [('distance', 'surf_dist')]),
        ])

    # T2w-registration-based (T2B) correction branch. Always built: the
    # tile plot below compares its grid against the REGSEG grid.
    cmethod1 = sdc_t2b(num_threads=settings['nthreads'])
    export1 = pe.Node(ExportSlices(slices=[38, 48, 57, 67, 76, 86],
                                   axis=['axial', 'sagittal']),
                      name='ExportT2B')
    grid1 = pe.Node(SlicesGridplot(label=['T2B'] * 2,
                                   slices=[38, 48, 57, 67, 76, 86],
                                   view=['axial', 'sagittal']),
                    name='GridPlotT2B')
    mesh1 = pe.MapNode(ComputeMeshWarp(),
                       iterfield=['surface1', 'surface2'],
                       name='T2BSurfDistance')
    csv1 = pe.Node(AddCSVRow(in_file=settings['out_csv']), name="T2BAddRow")
    csv1.inputs.method = 'T2B'

    wf.connect([
        (pre, cmethod1, [('outputnode.warped_dwi', 'inputnode.in_dwi'),
                         ('outputnode.warped_msk', 'inputnode.dwi_mask'),
                         ('outputnode.t2w_brain', 'inputnode.in_t2w'),
                         ('outputnode.t1w_mask', 'inputnode.t2w_mask'),
                         ('outputnode.surf', 'inputnode.in_surf'),
                         ('outputnode.bval', 'inputnode.in_bval'),
                         ('outputnode.mr_param', 'inputnode.in_param')]),
        (cmethod1, export1, [('outputnode.out_surf', 'syellow')]),
        (pre, export1, [('outputnode.warped_surf', 'sgreen')]),
        (wdti, export1, [('outputnode.fa', 'reference')]),
        (export1, grid1, [('out_files', 'in_files')]),
        (pre, mesh1, [('outputnode.warped_surf', 'surface1')]),
        (cmethod1, mesh1, [('outputnode.out_surf', 'surface2')]),
        (inputnode, csv1, [('subject_id', 'subject_id')]),
        (mesh1, csv1, [('distance', 'surf_dist')]),
    ])

    # Tile the comparison grid-plots and record them in a CSV.
    # NOTE(review): ``op`` is expected to be ``os.path`` imported at module
    # level (outside this view) — confirm at the top of the file.
    tile = pe.Node(TileSlicesGrid(), name='TileGridplots')
    csvtile = pe.Node(AddCSVRow(
        in_file=op.join(op.dirname(settings['out_csv']), 'tiles.csv')),
        name="TileAddRow")

    wf.connect([
        (inputnode, tile, [('subject_id', 'out_file')]),
        (gridrs, tile, [('out_file', 'in_reference')]),
        (grid1, tile, [('out_file', 'in_competing')]),
        (tile, csvtile, [('out_file', 'names')]),
    ])

    if map_metric:
        out_csv = op.abspath(op.join(name, 'energiesmapping.csv'))
        mapen = ev.map_energy(out_csv=out_csv)
        wf.connect([
            (inputnode, mapen, [('subject_id', 'inputnode.subject_id')]),
            (regseg, mapen, [('outputnode.out_enh', 'inputnode.reference'),
                             ('outputnode.reg_msk', 'inputnode.in_mask')]),
            (pre, mapen, [('outputnode.warped_surf', 'inputnode.surfaces0'),
                          ('outputnode.surf', 'inputnode.surfaces1')]),
        ])

    return wf
def first_level_wf(pipeline, subject_id, task_id, output_dir):
    """Build the first-level (single-subject, single-task) FSL workflow.

    Masks and smooths the preprocessed BOLD run, fits the first-level GLM
    with FEAT, estimates smoothness (FSL and AFNI) before and after
    smoothing, and records the estimates in a CSV row.
    """
    workflow = pe.Workflow(name='_'.join((pipeline, subject_id, task_id)))

    # Workflow I/O boundaries.
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_preproc', 'contrasts', 'confounds', 'brainmask', 'events_file'
    ]), name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['sigma_pre', 'sigma_post', 'out_stats']),
        name='outputnode')

    # Preparation: confounds -> motion parameters, brain masking, smoothing.
    conf2movpar = pe.Node(niu.Function(function=_confounds2movpar),
                          name='conf2movpar')
    masker = pe.Node(fsl.ApplyMask(), name='masker')
    bim = pe.Node(afni.BlurInMask(fwhm=5.0, outputtype='NIFTI_GZ'),
                  name='bim', mem_gb=20)

    # First-level model specification and estimation.
    ev = pe.Node(EventsFilesForTask(task=task_id), name='events')
    l1 = pe.Node(SpecifyModel(
        input_units='secs',
        time_repetition=2,
        high_pass_filter_cutoff=100,
        parameter_source='FSL',
    ), name='l1')
    l1model = pe.Node(fsl.Level1Design(
        interscan_interval=2,
        bases={'dgamma': {'derivs': True}},
        model_serial_correlations=True), name='l1design')
    l1featmodel = pe.Node(fsl.FEATModel(), name='l1model')
    l1estimate = pe.Node(fsl.FEAT(), name='l1estimate', mem_gb=40)

    # Smoothness estimation, pre- and post-smoothing, with both tools.
    pre_smooth_afni = pe.Node(afni.FWHMx(combine=True, detrend=True,
                                         args='-ShowMeClassicFWHM'),
                              name='smooth_pre_afni', mem_gb=20)
    post_smooth_afni = pe.Node(afni.FWHMx(combine=True, detrend=True,
                                          args='-ShowMeClassicFWHM'),
                               name='smooth_post_afni', mem_gb=20)
    pre_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_pre', mem_gb=20)
    post_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_post', mem_gb=20)

    def _resels(val):
        # Cube root of the RESELS value — presumably converting a volume
        # to an isotropic linear estimate (TODO confirm).
        return val ** (1 / 3.)

    def _fwhm(fwhm):
        from numpy import mean
        return float(mean(fwhm, dtype=float))

    workflow.connect([
        (inputnode, masker, [('bold_preproc', 'in_file'),
                             ('brainmask', 'mask_file')]),
        (inputnode, ev, [('events_file', 'in_file')]),
        (inputnode, l1model, [('contrasts', 'contrasts')]),
        (inputnode, conf2movpar, [('confounds', 'in_confounds')]),
        (inputnode, bim, [('brainmask', 'mask')]),
        (masker, bim, [('out_file', 'in_file')]),
        (bim, l1, [('out_file', 'functional_runs')]),
        (ev, l1, [('event_files', 'event_files')]),
        (conf2movpar, l1, [('out', 'realignment_parameters')]),
        (l1, l1model, [('session_info', 'session_info')]),
        (ev, l1model, [('orthogonalization', 'orthogonalization')]),
        (l1model, l1featmodel, [('fsf_files', 'fsf_file'),
                                ('ev_files', 'ev_files')]),
        (l1model, l1estimate, [('fsf_files', 'fsf_file')]),
        # Smooth
        (inputnode, pre_smooth, [('bold_preproc', 'zstat_file'),
                                 ('brainmask', 'mask_file')]),
        (bim, post_smooth, [('out_file', 'zstat_file')]),
        (inputnode, post_smooth, [('brainmask', 'mask_file')]),
        (pre_smooth, outputnode, [(('resels', _resels), 'sigma_pre')]),
        (post_smooth, outputnode, [(('resels', _resels), 'sigma_post')]),
        # Smooth with AFNI
        (inputnode, pre_smooth_afni, [('bold_preproc', 'in_file'),
                                      ('brainmask', 'mask')]),
        (bim, post_smooth_afni, [('out_file', 'in_file')]),
        (inputnode, post_smooth_afni, [('brainmask', 'mask')]),
    ])

    # Writing outputs
    csv = pe.Node(AddCSVRow(in_file=str(output_dir / 'smoothness.csv')),
                  name='addcsv_%s_%s' % (subject_id, pipeline))
    csv.inputs.sub_id = subject_id
    csv.inputs.pipeline = pipeline

    # Datasinks
    ds_stats = pe.Node(niu.Function(function=_feat_stats), name='ds_stats')
    ds_stats.inputs.subject_id = subject_id
    ds_stats.inputs.task_id = task_id
    ds_stats.inputs.variant = pipeline
    ds_stats.inputs.out_path = output_dir
    # Force the datasink to run even when outputs look cached.
    setattr(ds_stats.interface, '_always_run', True)

    workflow.connect([
        (outputnode, csv, [('sigma_pre', 'smooth_pre'),
                           ('sigma_post', 'smooth_post')]),
        (pre_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_pre')]),
        (post_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_post')]),
        (l1estimate, ds_stats, [('feat_dir', 'feat_dir')]),
        (ds_stats, outputnode, [('out', 'out_stats')]),
    ])
    return workflow