from nipype.interfaces.fsl import ImageStats
from nipype.interfaces.fsl.maths import MathsCommand


def get_volume(mask, thresh, out_file):
    """Binarize `mask` at `thresh` (saved to `out_file`) and return the
    volume of the suprathreshold voxels in mm^3."""
    # write a binarized copy of the thresholded mask for later use
    binarize = MathsCommand(in_file=mask,
                            args='-thr {0} -bin'.format(thresh),
                            out_file=out_file)
    binarize.run()
    # -l <thresh> -V reports (voxel count, volume) for voxels above threshold
    volume = ImageStats(in_file=mask,
                        op_string='-l {0} -V'.format(thresh))
    vout = volume.run()
    outstat = vout.outputs.out_stat
    return outstat[1]
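# A minimal usage sketch for get_volume; the file names are hypothetical.
# Since ImageStats' -V flag returns (voxel count, volume in mm^3), the
# function returns the suprathreshold volume in mm^3.
vol_mm3 = get_volume(mask='gm_prob.nii.gz', thresh=0.3,
                     out_file='gm_mask.nii.gz')
print('volume above threshold: %.1f mm^3' % vol_mm3)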
# auto-generated nipype interface test; imports follow the standard header
from nipype.testing import assert_equal
from nipype.interfaces.fsl.maths import MathsCommand


def test_MathsCommand_outputs():
    output_map = dict(
        out_file=dict(),
    )
    outputs = MathsCommand.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
from nipype import Node, Workflow
from nipype.interfaces.ants import ApplyTransforms
from nipype.interfaces.fsl.maths import MathsCommand
from nipype.interfaces.utility import IdentityInterface, Merge


def make_workflow_roi(region):
    """Build a workflow that extracts V1, V2, or V3 from a visual atlas and
    resamples the binary mask to functional space.

    Benson_ROI_Names = {'V1', 'V2', 'V3', 'hV4', 'VO1', 'VO2', 'LO1', 'LO2',
                        'TO1', 'TO2', 'V3B', 'V3A'};
    Wang_ROI_Names = ['V1v', 'V1d', 'V2v', 'V2d', 'V3v', 'V3d', 'hV4', 'VO1',
                      'VO2', 'PHC1', 'PHC2', 'TO2', 'TO1', 'LO2', 'LO1',
                      'V3B', 'V3A', 'IPS0', 'IPS1', 'IPS2', 'IPS3', 'IPS4',
                      'IPS5', 'SPL1', 'FEF'];
    """
    w = Workflow(f'roi_{region}')

    n_in = Node(IdentityInterface(fields=[
        'atlas',
        'func_to_struct',
        'struct_to_freesurfer',
        'ref',
        ]), name='input')
    n_out = Node(IdentityInterface(fields=[
        'mask_file',
        ]), name='output')

    n_m = Node(Merge(2), 'merge')

    # threshold the atlas to keep only the labels of the requested region
    n_v = Node(MathsCommand(), region)
    n_v.inputs.out_file = 'roi.nii.gz'
    n_v.inputs.nan2zeros = True
    if region == 'V1':
        n_v.inputs.args = '-uthr 1 -bin'
    elif region == 'V2':
        n_v.inputs.args = '-thr 2 -uthr 3 -bin'
    elif region == 'V3':
        n_v.inputs.args = '-thr 4 -uthr 5 -bin'
    else:
        raise ValueError(f'Unknown region {region}. It should be V1, V2, V3')

    # bring the binary ROI into functional space by applying the inverted
    # func->struct and struct->freesurfer transforms
    at = Node(ApplyTransforms(), 'applytransform')
    at.inputs.dimension = 3
    at.inputs.output_image = 'roi_func.nii.gz'
    at.inputs.interpolation = 'Linear'
    at.inputs.default_value = 0
    at.inputs.invert_transform_flags = [True, True]

    w.connect(n_in, 'atlas', n_v, 'in_file')
    w.connect(n_v, 'out_file', at, 'input_image')
    w.connect(n_in, 'ref', at, 'reference_image')
    w.connect(n_in, 'struct_to_freesurfer', n_m, 'in1')
    w.connect(n_in, 'func_to_struct', n_m, 'in2')
    w.connect(n_m, 'out', at, 'transforms')
    w.connect(at, 'output_image', n_out, 'mask_file')

    return w
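# Usage sketch (all paths are hypothetical): feed the workflow an atlas
# volume, two ANTs-compatible transform files, and a functional reference.
wf = make_workflow_roi('V1')
wf.inputs.input.atlas = 'atlas_varea.nii.gz'
wf.inputs.input.func_to_struct = 'func2struct_0GenericAffine.mat'
wf.inputs.input.struct_to_freesurfer = 'struct2fs_0GenericAffine.mat'
wf.inputs.input.ref = 'mean_func.nii.gz'
wf.run()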
def test_MathsCommand_inputs():
    input_map = dict(
        args=dict(argstr='%s', ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_file=dict(
            argstr='%s',
            mandatory=True,
            position=2,
        ),
        internal_datatype=dict(
            argstr='-dt %s',
            position=1,
        ),
        nan2zeros=dict(
            argstr='-nan',
            position=3,
        ),
        out_file=dict(
            argstr='%s',
            genfile=True,
            hash_files=False,
            position=-2,
        ),
        output_datatype=dict(
            argstr='-odt %s',
            position=-1,
        ),
        output_type=dict(),
        terminal_output=dict(
            mandatory=True,
            nohash=True,
        ),
    )
    inputs = MathsCommand.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
from nipype import Node, Workflow
from nipype.algorithms.modelgen import SpecifyModel
from nipype.interfaces.fsl import FEATModel, FILMGLS
from nipype.interfaces.fsl import Level1Design as fsl_design  # assumed alias
from nipype.interfaces.fsl.maths import MathsCommand
from nipype.interfaces.utility import IdentityInterface


def create_workflow_spatialobject_fsl():
    # the input/output identity nodes and the SpecifyModel node were
    # referenced but not defined in the original snippet; minimal
    # definitions follow
    input_node = Node(IdentityInterface(fields=['bold', 'events']),
                      name='input')
    output_node = Node(IdentityInterface(fields=['T_image']), name='output')

    replace_nan = Node(interface=MathsCommand(), name='replace_nan')
    replace_nan.inputs.nan2zeros = True

    # build session_info from the BIDS events file
    model = Node(interface=SpecifyModel(), name='model')
    model.inputs.input_units = 'secs'
    model.inputs.time_repetition = .85  # assumed equal to interscan_interval
    model.inputs.high_pass_filter_cutoff = 128.  # assumption: not in original

    # GLM
    design = Node(interface=fsl_design(), name='design')
    design.inputs.interscan_interval = .85
    design.inputs.bases = {'gamma': {'derivs': False}}
    design.inputs.model_serial_correlations = True
    design.inputs.contrasts = [
        ('Faces', 'T', ['FACES', 'HOUSES', 'LETTERS'], [1, 0, 0]),
        ('Houses', 'T', ['FACES', 'HOUSES', 'LETTERS'], [0, 1, 0]),
        ('Letters', 'T', ['FACES', 'HOUSES', 'LETTERS'], [0, 0, 1]),
        ]

    modelgen = Node(interface=FEATModel(), name='glm')

    estimate = Node(interface=FILMGLS(), name='estimate')
    estimate.inputs.smooth_autocorr = True
    estimate.inputs.mask_size = 5
    estimate.inputs.threshold = 1000

    w = Workflow(name='spatialobject_fsl')
    w.connect(input_node, 'bold', replace_nan, 'in_file')
    w.connect(replace_nan, 'out_file', model, 'functional_runs')
    w.connect(input_node, 'events', model, 'bids_event_file')
    w.connect(model, 'session_info', design, 'session_info')
    w.connect(design, 'fsf_files', modelgen, 'fsf_file')
    w.connect(design, 'ev_files', modelgen, 'ev_files')
    w.connect(modelgen, 'design_file', estimate, 'design_file')
    w.connect(replace_nan, 'out_file', estimate, 'in_file')
    w.connect(modelgen, 'con_file', estimate, 'tcon_file')
    w.connect(estimate, 'zstats', output_node, 'T_image')

    return w
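# Usage sketch: wire the identity input node to real data (hypothetical
# paths) and run the workflow.
w = create_workflow_spatialobject_fsl()
w.inputs.input.bold = 'sub-01_task-spatialobject_bold.nii.gz'
w.inputs.input.events = 'sub-01_task-spatialobject_events.tsv'
w.run()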
import os
from os.path import abspath
from datetime import datetime
from IPython.display import Image
import pydot
from nipype import Workflow, Node, MapNode, Function, config
from nipype.interfaces.fsl import (TOPUP, ApplyTOPUP, BET, ExtractROI, Eddy,
                                   FLIRT, FUGUE)
from nipype.interfaces.fsl.maths import MathsCommand
import nipype.interfaces.utility as util
import nipype.interfaces.mrtrix3 as mrt

# Requirements for the workflow to run smoothly: all files in NIfTI format,
# named according to the following standard:
# - Images are from the tonotopy DKI sequences on the 7T Philips Achieva
#   scanner in Lund. The workflow should work with any DKI sequence, and
#   possibly also a standard DTI, but the settings for B0 correction,
#   EPI-distortion correction and eddy-current correction will be wrong.
# - The DKI file has a base name shared with its bvec and bval files in FSL
#   format, e.g. "DKI.nii.gz", "DKI.bvec" and "DKI.bval".
# - There is one b0 volume with reversed (P->A) phase encoding, named
#   <DKI base>_revenc, e.g. "DKI_revenc.nii.gz".
# - Philips B0-map magnitude and phase-offset (in Hz) images.
# - One input file for topup describing the images, as specified by topup.
# Set nbrOfThreads to the number of available CPU threads for the analyses.
### Need to make a better revenc for the 15 version if we choose to use it
### (i.e. same TE and TR)

# Set to relevant directory/parameters
datadir = os.path.abspath("/Users/ling-men/Documents/MRData/testDKI")
rawDKI_base = 'DKI_15'
B0map_base = 'B0map'
nbrOfThreads = 6
print_graph = True
acqparam_file = os.path.join(datadir, 'acqparams.txt')
index_file = os.path.join(datadir, 'index.txt')
####
# config.enable_debug_mode()
DKI_nii = os.path.join(datadir, rawDKI_base + '.nii.gz')
DKI_bval = os.path.join(datadir, rawDKI_base + '.bval')
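# Sketch of the two text files referenced above, with placeholder values.
# In topup's acqparams format each row describes one image: the first three
# columns are the phase-encoding direction, the fourth is the total readout
# time in seconds. Assumptions here: an A->P main series plus one reversed
# P->A b0, and a readout time of 0.095 s, which depends on your sequence.
# eddy's index file assigns every DKI volume to a row of acqparams.txt.
nbr_of_volumes = 17  # hypothetical: number of volumes in DKI_15.nii.gz
with open(acqparam_file, 'w') as f:
    f.write('0 -1 0 0.095\n'  # row 1: main DKI series (A->P)
            '0 1 0 0.095\n')  # row 2: reversed-phase-encode b0 (P->A)
with open(index_file, 'w') as f:
    f.write(' '.join(['1'] * nbr_of_volumes) + '\n')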
def create_workflow_temporalpatterns_fsl():
    # as in create_workflow_spatialobject_fsl above, define the implied
    # input/output identity nodes and the SpecifyModel node
    input_node = Node(IdentityInterface(fields=['bold', 'events']),
                      name='input')
    output_node = Node(IdentityInterface(fields=['T_image']), name='output')
    model = Node(interface=SpecifyModel(), name='model')
    model.inputs.input_units = 'secs'
    model.inputs.time_repetition = .85  # assumed equal to interscan_interval
    model.inputs.high_pass_filter_cutoff = 128.  # assumption: not in original

    replace_nan = Node(interface=MathsCommand(), name='replace_nan')
    replace_nan.inputs.nan2zeros = True

    # GLM
    design = Node(interface=fsl_design(), name='design')
    design.inputs.interscan_interval = .85
    design.inputs.bases = {'gamma': {'derivs': False}}
    design.inputs.model_serial_correlations = True
    design.inputs.contrasts = [
        ('OnePulse', 'T',
         ['ONEPULSE-1', 'ONEPULSE-2', 'ONEPULSE-3',
          'ONEPULSE-4', 'ONEPULSE-5', 'ONEPULSE-6'],
         [1, 1, 1, 1, 1, 1]),
        ('TwoPulses', 'T',
         ['TWOPULSE-1', 'TWOPULSE-2', 'TWOPULSE-3',
          'TWOPULSE-4', 'TWOPULSE-5', 'TWOPULSE-6'],
         [1, 1, 1, 1, 1, 1]),
        ('OnePulse_linear', 'T',
         ['ONEPULSE-1', 'ONEPULSE-2', 'ONEPULSE-3',
          'ONEPULSE-4', 'ONEPULSE-5', 'ONEPULSE-6'],
         [-3, -2, -1, 1, 2, 3]),
        ('TwoPulse_linear', 'T',
         ['TWOPULSE-1', 'TWOPULSE-2', 'TWOPULSE-3',
          'TWOPULSE-4', 'TWOPULSE-5', 'TWOPULSE-6'],
         [-3, -2, -1, 1, 2, 3]),
        ]

    modelgen = Node(interface=FEATModel(), name='glm')

    estimate = Node(interface=FILMGLS(), name='estimate')
    estimate.inputs.smooth_autocorr = True
    estimate.inputs.mask_size = 5
    estimate.inputs.threshold = 1000

    w = Workflow(name='temporalpattern_fsl')
    w.connect(input_node, 'bold', replace_nan, 'in_file')
    w.connect(replace_nan, 'out_file', model, 'functional_runs')
    w.connect(input_node, 'events', model, 'bids_event_file')
    w.connect(model, 'session_info', design, 'session_info')
    w.connect(design, 'fsf_files', modelgen, 'fsf_file')
    w.connect(design, 'ev_files', modelgen, 'ev_files')
    w.connect(modelgen, 'design_file', estimate, 'design_file')
    w.connect(replace_nan, 'out_file', estimate, 'in_file')
    w.connect(modelgen, 'con_file', estimate, 'tcon_file')
    w.connect(estimate, 'zstats', output_node, 'T_image')

    return w
# define final operational strings to be used
gm_str.append('-div %s' % len(gm_list))
op_string_gm = " ".join(gm_str)
op_string_rest = " ".join(rest_str)

# get group-level probabilistic GM by averaging
maths = MultiImageMaths()
maths.inputs.in_file = gm_list[0]
maths.inputs.op_string = op_string_gm
maths.inputs.operand_files = gm_list[1:]
maths.inputs.out_file = 'gm_prob_mni3_ave.nii.gz'
maths.run()
# print maths.cmdline

# get GM mask (binarize the probabilistic GM map)
binarize = MathsCommand()
binarize.inputs.args = '-thr 0.30 -bin'
binarize.inputs.in_file = 'gm_prob_mni3_ave.nii.gz'
binarize.inputs.out_file = 'gm_prob_mni3_ave_mask.nii.gz'
binarize.run()

# get group-level resting mask by multiplying individual ones
maths = MultiImageMaths()
maths.inputs.in_file = rest_list[0]
maths.inputs.op_string = op_string_rest
maths.inputs.operand_files = rest_list[1:]
maths.inputs.out_file = 'rest_mask_mni3_ave.nii.gz'
maths.run()

############# Step 2 #######################################
maths = MultiImageMaths()
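# For context, the op-string lists joined above were presumably built from
# the per-subject file lists roughly like this (a sketch; the original
# construction is not part of this excerpt):
gm_str = ['-add %s'] * (len(gm_list) - 1)      # sum the remaining GM maps;
                                               # the appended '-div N' averages
rest_str = ['-mul %s'] * (len(rest_list) - 1)  # intersect the resting masks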
def create_subject_ffx_wf(
        sub_id, bet_fracthr, spatial_fwhm, susan_brightthresh, hp_vols,
        lp_vols, remove_hemi, film_thresh, film_model_autocorr, use_derivs,
        tr, tcon_subtractive, cluster_threshold, cluster_thresh_frac,
        cluster_p, dilate_clusters_voxel, cond_ids, dsdir, work_basedir):
    # todo: new mapnode inputs: cluster_threshold, cluster_p
    """
    Make a workflow including preprocessing, first level, and second level
    GLM analysis for a given subject. This pipeline includes:
    - skull stripping
    - spatial smoothing
    - removing the irrelevant hemisphere
    - temporal band pass filter
    - 1st level GLM
    - averaging f-contrasts from 1st level GLM
    - clustering run-wise f-tests, dilating clusters, and returning a binary
      roi mask
    """
    from nipype.algorithms.modelgen import SpecifyModel
    from nipype.interfaces.fsl import BET, SUSAN, ImageMaths
    from nipype.interfaces.fsl.model import SmoothEstimate, Cluster
    from nipype.interfaces.fsl.maths import (TemporalFilter, MathsCommand,
                                             MultiImageMaths)
    from nipype.interfaces.utility import Function, IdentityInterface
    from nipype.pipeline.engine import Workflow, Node, MapNode
    from nipype.workflows.fmri.fsl import create_modelfit_workflow
    import sys
    from os.path import join as pjoin
    import os

    # make custom_node_functions importable by adding its directory
    # (not the .py file itself) to the module search path
    sys.path.insert(
        0, "/data/project/somato/raw/code/roi_glm")  # TODO: don't hardcode this
    import custom_node_functions

    # set up sub-workflow
    sub_wf = Workflow(name='subject_%s_wf' % sub_id)
    # set up sub-working-directory
    subwf_wd = pjoin(work_basedir, 'subject_ffx_wfs',
                     'subject_%s_ffx_workdir' % sub_id)
    if not os.path.exists(subwf_wd):
        os.makedirs(subwf_wd)
    sub_wf.base_dir = subwf_wd

    # Grab bold files for all four runs of one subject,
    # in the order [d1_d5, d5_d1, blocked_design1, blocked_design2].
    grab_boldfiles = Node(
        Function(function=custom_node_functions.grab_boldfiles_subject,
                 input_names=['sub_id', 'cond_ids', 'ds_dir'],
                 output_names=['boldfiles']),
        name='grab_boldfiles')
    grab_boldfiles.inputs.sub_id = sub_id
    grab_boldfiles.inputs.cond_ids = cond_ids
    grab_boldfiles.inputs.ds_dir = dsdir

    getonsets = Node(
        Function(
            function=custom_node_functions.grab_blocked_design_onsets_subject,
            input_names=['sub_id', 'prepped_ds_dir'],
            output_names=['blocked_design_onsets_dicts']),
        name='getonsets')
    getonsets.inputs.sub_id = sub_id
    getonsets.inputs.prepped_ds_dir = dsdir

    # pass bold files through the preprocessing pipeline
    bet = MapNode(BET(frac=bet_fracthr, functional=True, mask=True),
                  iterfield=['in_file'], name='bet')

    # (defined but not connected in this excerpt)
    pick_mask = Node(
        Function(function=custom_node_functions.pick_first_mask,
                 input_names=['mask_files'], output_names=['first_mask']),
        name='pick_mask')

    # SUSAN smoothing node
    susan = MapNode(SUSAN(fwhm=spatial_fwhm,
                          brightness_threshold=susan_brightthresh),
                    iterfield=['in_file'], name='susan')

    # bandpass filter node (cutoffs given in volumes, converted to sigmas)
    bpf = MapNode(TemporalFilter(highpass_sigma=hp_vols / 2.3548,
                                 lowpass_sigma=lp_vols / 2.3548),
                  iterfield=['in_file'], name='bpf')

    # cut away hemisphere node
    if remove_hemi == 'r':
        roi_args = '-roi 96 -1 0 -1 0 -1 0 -1'
    elif remove_hemi == 'l':
        roi_args = '-roi 0 96 0 -1 0 -1 0 -1'
    else:
        raise IOError('did not recognize value of remove_hemi %s'
                      % remove_hemi)
    cut_hemi_func = MapNode(MathsCommand(), iterfield=['in_file'],
                            name='cut_hemi_func')
    cut_hemi_func.inputs.args = roi_args
    cut_hemi_mask = MapNode(MathsCommand(), iterfield=['in_file'],
                            name='cut_hemi_mask')
    cut_hemi_mask.inputs.args = roi_args

    # Make design and contrasts for that subject. subject_info is a list of
    # two "Bunches", one per run, containing conditions, onsets, durations.
    designgen = Node(
        Function(input_names=['subtractive_contrast',
                              'blocked_design_onsets_dicts'],
                 output_names=['subject_info', 'contrasts'],
                 function=custom_node_functions.make_bunch_and_contrasts),
        name='designgen')
    designgen.inputs.subtractive_contrast = tcon_subtractive

    # create 'session_info' for modelfit
    modelspec = MapNode(SpecifyModel(input_units='secs'), name='modelspec',
                        iterfield=['functional_runs', 'subject_info'])
    modelspec.inputs.high_pass_filter_cutoff = hp_vols * tr
    modelspec.inputs.time_repetition = tr

    flatten_session_infos = Node(
        Function(input_names=['nested_list'], output_names=['flat_list'],
                 function=custom_node_functions.flatten_nested_list),
        name='flatten_session_infos')

    # first-level workflow
    modelfit = create_modelfit_workflow(f_contrasts=True)
    modelfit.inputs.inputspec.interscan_interval = tr
    modelfit.inputs.inputspec.film_threshold = film_thresh
    modelfit.inputs.inputspec.model_serial_correlations = film_model_autocorr
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivs}}

    # node that reshapes the list of copes returned from modelfit
    # (defined but not connected in this excerpt)
    cope_sorter = Node(
        Function(input_names=['copes', 'varcopes', 'contrasts'],
                 output_names=['copes', 'varcopes', 'n_runs'],
                 function=custom_node_functions.sort_copes),
        name='cope_sorter')

    # average zfstats from both runs
    split_zfstats = Node(
        Function(function=custom_node_functions.split_zfstats_runs,
                 input_names=['zfstats_list'],
                 output_names=['zfstat_run1', 'zfstat_run2']),
        name='split_zfstats')
    average_zfstats = Node(MultiImageMaths(op_string='-add %s -div 2'),
                           name='mean_images')

    # estimate smoothness of 1st-lvl zf-files
    smoothest = MapNode(SmoothEstimate(), name='smoothest',
                        iterfield=['mask_file', 'zstat_file'])

    cluster = MapNode(Cluster(), name='cluster',
                      iterfield=['in_file', 'volume', 'dlh'])
    cluster.inputs.threshold = cluster_threshold
    cluster.inputs.pthreshold = cluster_p
    cluster.inputs.fractional = cluster_thresh_frac
    cluster.inputs.no_table = True
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_pval_file = True
    cluster.inputs.out_localmax_vol_file = True
    cluster.inputs.out_max_file = True
    cluster.inputs.out_size_file = True

    # dilate clusters
    dilate = MapNode(
        MathsCommand(args='-kernel sphere %i -dilD' % dilate_clusters_voxel),
        iterfield=['in_file'], name='dilate')

    # binarize the result to a mask
    binarize_roi = MapNode(ImageMaths(op_string='-nan -thr 0.001 -bin'),
                           iterfield=['in_file'], name='binarize_roi')

    # connect preprocessing
    sub_wf.connect(grab_boldfiles, 'boldfiles', bet, 'in_file')
    sub_wf.connect(bet, 'out_file', susan, 'in_file')
    sub_wf.connect(susan, 'smoothed_file', bpf, 'in_file')
    sub_wf.connect(bpf, 'out_file', cut_hemi_func, 'in_file')
    sub_wf.connect(bet, 'mask_file', cut_hemi_mask, 'in_file')
    # connect to 1st level model
    sub_wf.connect(cut_hemi_func, 'out_file', modelspec, 'functional_runs')
    sub_wf.connect(getonsets, 'blocked_design_onsets_dicts',
                   designgen, 'blocked_design_onsets_dicts')
    sub_wf.connect(designgen, 'subject_info', modelspec, 'subject_info')
    sub_wf.connect(modelspec, 'session_info',
                   flatten_session_infos, 'nested_list')
    sub_wf.connect(flatten_session_infos, 'flat_list',
                   modelfit, 'inputspec.session_info')
    sub_wf.connect(designgen, 'contrasts', modelfit, 'inputspec.contrasts')
    sub_wf.connect(cut_hemi_func, 'out_file',
                   modelfit, 'inputspec.functional_data')
    # connect to cluster thresholding
    sub_wf.connect(cut_hemi_mask, 'out_file', smoothest, 'mask_file')
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats',
                   smoothest, 'zstat_file')
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats',
                   cluster, 'in_file')
    sub_wf.connect(smoothest, 'dlh', cluster, 'dlh')
    sub_wf.connect(smoothest, 'volume', cluster, 'volume')
    sub_wf.connect(cluster, 'threshold_file', dilate, 'in_file')
    sub_wf.connect(dilate, 'out_file', binarize_roi, 'in_file')
    # connect to averaging f-contrasts
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats',
                   split_zfstats, 'zfstats_list')
    sub_wf.connect(split_zfstats, 'zfstat_run1', average_zfstats, 'in_file')
    sub_wf.connect(split_zfstats, 'zfstat_run2',
                   average_zfstats, 'operand_files')

    # redirect to outputspec
    # TODO: redirect outputspec to datasink in meta-wf
    outputspec = Node(IdentityInterface(fields=[
        'threshold_file', 'index_file', 'pval_file', 'localmax_txt_file',
        'roi',  # 'roi' is connected below but was missing from this list
    ]), name='outputspec')
    sub_wf.connect(cluster, 'threshold_file', outputspec, 'threshold_file')
    sub_wf.connect(cluster, 'index_file', outputspec, 'index_file')
    sub_wf.connect(cluster, 'pval_file', outputspec, 'pval_file')
    sub_wf.connect(cluster, 'localmax_txt_file',
                   outputspec, 'localmax_txt_file')
    sub_wf.connect(binarize_roi, 'out_file', outputspec, 'roi')

    # run subject-lvl workflow
    # sub_wf.write_graph(graph2use='colored', dotfilename='./subwf_graph.dot')
    # sub_wf.run(plugin='MultiProc', plugin_args={'n_procs': 6})
    # sub_wf.run(plugin='CondorDAGMan')
    # sub_wf.run()

    return sub_wf
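# Usage sketch; every parameter value below is illustrative only, not taken
# from the original code:
wf = create_subject_ffx_wf(
    sub_id='01', bet_fracthr=0.4, spatial_fwhm=4., susan_brightthresh=1000,
    hp_vols=30., lp_vols=2., remove_hemi='r', film_thresh=0.001,
    film_model_autocorr=True, use_derivs=True, tr=2., tcon_subtractive=False,
    cluster_threshold=3.1, cluster_thresh_frac=True, cluster_p=0.05,
    dilate_clusters_voxel=2,
    cond_ids=['d1_d5', 'd5_d1', 'blocked_design1', 'blocked_design2'],
    dsdir='/data/project/somato/raw', work_basedir='/tmp/somato_work')
wf.run(plugin='MultiProc', plugin_args={'n_procs': 6})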