def test_node_hash(tmpdir):
    from nipype.interfaces.utility import Function
    tmpdir.chdir()

    config.set_default_config()
    config.set('execution', 'stop_on_first_crash', True)
    config.set('execution', 'crashdump_dir', os.getcwd())

    def func1():
        return 1

    def func2(a):
        return a + 1

    n1 = pe.Node(Function(input_names=[],
                          output_names=['a'],
                          function=func1),
                 name='n1')
    n2 = pe.Node(Function(input_names=['a'],
                          output_names=['b'],
                          function=func2),
                 name='n2')
    w1 = pe.Workflow(name='test')

    def modify(x):
        return x + 1

    n1.inputs.a = 1
    w1.connect(n1, ('a', modify), n2, 'a')
    w1.base_dir = os.getcwd()

    # create dummy distributed plugin class
    from nipype.pipeline.plugins.base import DistributedPluginBase

    # create a custom exception
    class EngineTestException(Exception):
        pass

    class RaiseError(DistributedPluginBase):
        def _submit_job(self, node, updatehash=False):
            raise EngineTestException(
                'Submit called - cached=%s, updated=%s' % node.is_cached())

    # check if a proper exception is raised
    with pytest.raises(EngineTestException) as excinfo:
        w1.run(plugin=RaiseError())
    assert str(excinfo.value).startswith('Submit called')

    # generate outputs
    w1.run(plugin='Linear')

    # ensure plugin is being called
    config.set('execution', 'local_hash_check', False)

    # rerun to ensure we have outputs
    w1.run(plugin='Linear')

    # set local check
    config.set('execution', 'local_hash_check', True)
    w1 = pe.Workflow(name='test')
    w1.connect(n1, ('a', modify), n2, 'a')
    w1.base_dir = os.getcwd()
    w1.run(plugin=RaiseError())
def test_mapnode_nested():
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    from nipype import MapNode, Function

    def func1(in1):
        return in1 + 1

    n1 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 nested=True,
                 name='n1')
    n1.inputs.in1 = [[1, [2]], 3, [4, 5]]
    n1.run()
    print(n1.get_output('out'))
    yield assert_equal, n1.get_output('out'), [[2, [3]], 4, [5, 6]]

    n2 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 nested=False,
                 name='n1')
    n2.inputs.in1 = [[1, [2]], 3, [4, 5]]

    error_raised = False
    try:
        n2.run()
    except Exception as e:
        pe.logger.info('Exception: %s' % str(e))
        error_raised = True
    yield assert_true, error_raised
def test_old_config(tmpdir):
    wd = str(tmpdir)
    os.chdir(wd)
    from nipype.interfaces.utility import Function

    def func1():
        return 1

    def func2(a):
        return a + 1

    n1 = pe.Node(Function(input_names=[],
                          output_names=['a'],
                          function=func1),
                 name='n1')
    n2 = pe.Node(Function(input_names=['a'],
                          output_names=['b'],
                          function=func2),
                 name='n2')
    w1 = pe.Workflow(name='test')
    modify = lambda x: x + 1
    n1.inputs.a = 1
    w1.connect(n1, ('a', modify), n2, 'a')
    w1.base_dir = wd

    w1.config['execution']['crashdump_dir'] = wd
    # generate outputs
    error_raised = False
    try:
        w1.run(plugin='Linear')
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        error_raised = True
    assert not error_raised
def test_old_config():
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    from nipype.interfaces.utility import Function

    def func1():
        return 1

    def func2(a):
        return a + 1

    n1 = pe.Node(Function(input_names=[],
                          output_names=['a'],
                          function=func1),
                 name='n1')
    n2 = pe.Node(Function(input_names=['a'],
                          output_names=['b'],
                          function=func2),
                 name='n2')
    w1 = pe.Workflow(name='test')
    modify = lambda x: x + 1
    n1.inputs.a = 1
    w1.connect(n1, ('a', modify), n2, 'a')
    w1.base_dir = wd

    w1.config['execution']['crashdump_dir'] = wd
    # generate outputs
    error_raised = False
    try:
        w1.run(plugin='Linear')
    except Exception as e:
        pe.logger.info('Exception: %s' % str(e))
        error_raised = True
    yield assert_false, error_raised
    os.chdir(cwd)
    rmtree(wd)
def test_old_config(tmpdir):
    tmpdir.chdir()
    wd = os.getcwd()
    from nipype.interfaces.utility import Function

    def func1():
        return 1

    def func2(a):
        return a + 1

    n1 = pe.Node(Function(input_names=[],
                          output_names=['a'],
                          function=func1),
                 name='n1')
    n2 = pe.Node(Function(input_names=['a'],
                          output_names=['b'],
                          function=func2),
                 name='n2')
    w1 = pe.Workflow(name='test')
    modify = lambda x: x + 1
    n1.inputs.a = 1
    w1.connect(n1, ('a', modify), n2, 'a')
    w1.base_dir = wd

    w1.config['execution']['crashdump_dir'] = wd
    # generate outputs
    w1.run(plugin='Linear')
def test_mapnode_nested(tmpdir):
    tmpdir.chdir()
    from nipype import MapNode, Function

    def func1(in1):
        return in1 + 1

    n1 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 nested=True,
                 name='n1')
    n1.inputs.in1 = [[1, [2]], 3, [4, 5]]
    n1.run()
    print(n1.get_output('out'))
    assert n1.get_output('out') == [[2, [3]], 4, [5, 6]]

    n2 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 nested=False,
                 name='n1')
    n2.inputs.in1 = [[1, [2]], 3, [4, 5]]

    with pytest.raises(Exception) as excinfo:
        n2.run()
    assert "can only concatenate list" in str(excinfo.value)
def create_graphics(bids_dir, subject_label, session=None, t2w=None):
    """
    Set up and run the graphics workflow, which creates the static plot(s)
    of defaced images with a brainmask overlaid and a gif looping through
    slices of the defaced images.

    Parameters
    ----------
    bids_dir : str
        Path to BIDS root directory.
    subject_label : str
        Label of subject to be plotted (without 'sub-').
    session : str, optional
        If multiple sessions exist, include them in the workflow.
    t2w : bool, optional
        If a T2w image exists, include it in the workflow.
    """
    import nipype.pipeline.engine as pe
    from nipype import Function
    from nipype.interfaces import utility as niu

    report_wf = pe.Workflow('report_wf')
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['bids_dir', 'subject_label', 'session', 't2w']),
        name='inputnode')

    plt_defaced = pe.Node(Function(
        input_names=['bids_dir', 'subject_label', 'session', 't2w'],
        function=plot_defaced),
        name='plt_defaced')
    gf_defaced = pe.Node(Function(
        input_names=['bids_dir', 'subject_label', 'session', 't2w'],
        function=gif_defaced),
        name='gf_defaced')

    report_wf.connect([
        (inputnode, plt_defaced, [('bids_dir', 'bids_dir'),
                                  ('subject_label', 'subject_label')]),
        (inputnode, gf_defaced, [('bids_dir', 'bids_dir'),
                                 ('subject_label', 'subject_label')]),
    ])

    if session:
        inputnode.inputs.session = session
        report_wf.connect([
            (inputnode, plt_defaced, [('session', 'session')]),
            (inputnode, gf_defaced, [('session', 'session')]),
        ])
    if t2w:
        inputnode.inputs.t2w = t2w
        report_wf.connect([
            (inputnode, plt_defaced, [('t2w', 't2w')]),
            (inputnode, gf_defaced, [('t2w', 't2w')]),
        ])

    inputnode.inputs.bids_dir = bids_dir
    inputnode.inputs.subject_label = subject_label

    report_wf.run()
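# A minimal usage sketch for create_graphics() above; the BIDS root,
# subject label, and session are placeholders, not values from the
# original project.
def _create_graphics_example():
    create_graphics(bids_dir='/data/bids', subject_label='01',
                    session='01', t2w=True)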
def test_mapnode_nested(tmpdir):
    os.chdir(str(tmpdir))
    from nipype import MapNode, Function

    def func1(in1):
        return in1 + 1

    n1 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 nested=True,
                 name='n1')
    n1.inputs.in1 = [[1, [2]], 3, [4, 5]]
    n1.run()
    print(n1.get_output('out'))
    assert n1.get_output('out') == [[2, [3]], 4, [5, 6]]

    n2 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 nested=False,
                 name='n1')
    n2.inputs.in1 = [[1, [2]], 3, [4, 5]]

    error_raised = False
    try:
        n2.run()
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        error_raised = True
    assert error_raised
def test_node_hash():
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    from nipype.interfaces.utility import Function

    def func1():
        return 1

    def func2(a):
        return a + 1

    n1 = pe.Node(Function(input_names=[],
                          output_names=['a'],
                          function=func1),
                 name='n1')
    n2 = pe.Node(Function(input_names=['a'],
                          output_names=['b'],
                          function=func2),
                 name='n2')
    w1 = pe.Workflow(name='test')
    modify = lambda x: x + 1
    n1.inputs.a = 1
    w1.connect(n1, ('a', modify), n2, 'a')
    w1.base_dir = wd
    # generate outputs
    w1.run(plugin='Linear')
    # ensure plugin is being called
    w1.config['execution'] = {'stop_on_first_crash': 'true',
                              'local_hash_check': 'false',
                              'crashdump_dir': wd}
    error_raised = False
    # create dummy distributed plugin class
    from nipype.pipeline.plugins.base import DistributedPluginBase

    class RaiseError(DistributedPluginBase):
        def _submit_job(self, node, updatehash=False):
            raise Exception('Submit called')

    try:
        w1.run(plugin=RaiseError())
    except Exception as e:
        pe.logger.info('Exception: %s' % str(e))
        error_raised = True
    yield assert_true, error_raised
    # yield assert_true, 'Submit called' in e
    # rerun to ensure we have outputs
    w1.run(plugin='Linear')
    # set local check
    w1.config['execution'] = {'stop_on_first_crash': 'true',
                              'local_hash_check': 'true',
                              'crashdump_dir': wd}
    error_raised = False
    try:
        w1.run(plugin=RaiseError())
    except Exception as e:
        pe.logger.info('Exception: %s' % str(e))
        error_raised = True
    yield assert_false, error_raised
    os.chdir(cwd)
    rmtree(wd)
def test_node_hash(tmpdir):
    wd = str(tmpdir)
    os.chdir(wd)
    from nipype.interfaces.utility import Function

    def func1():
        return 1

    def func2(a):
        return a + 1

    n1 = pe.Node(Function(input_names=[],
                          output_names=['a'],
                          function=func1),
                 name='n1')
    n2 = pe.Node(Function(input_names=['a'],
                          output_names=['b'],
                          function=func2),
                 name='n2')
    w1 = pe.Workflow(name='test')
    modify = lambda x: x + 1
    n1.inputs.a = 1
    w1.connect(n1, ('a', modify), n2, 'a')
    w1.base_dir = wd
    # generate outputs
    w1.run(plugin='Linear')
    # ensure plugin is being called
    w1.config['execution'] = {
        'stop_on_first_crash': 'true',
        'local_hash_check': 'false',
        'crashdump_dir': wd
    }

    # create dummy distributed plugin class
    from nipype.pipeline.plugins.base import DistributedPluginBase

    # create a custom exception
    class EngineTestException(Exception):
        pass

    class RaiseError(DistributedPluginBase):
        def _submit_job(self, node, updatehash=False):
            raise EngineTestException('Submit called')

    # check if a proper exception is raised
    with pytest.raises(EngineTestException) as excinfo:
        w1.run(plugin=RaiseError())
    assert 'Submit called' == str(excinfo.value)

    # rerun to ensure we have outputs
    w1.run(plugin='Linear')

    # set local check
    w1.config['execution'] = {
        'stop_on_first_crash': 'true',
        'local_hash_check': 'true',
        'crashdump_dir': wd
    }
    w1.run(plugin=RaiseError())
def test_execute(self, lyman_dir, execdir):
    info = frontend.info(lyman_dir=lyman_dir)

    def f(x):
        return x ** 2
    assert f(2) == 4

    n1 = Node(Function("x", "y", f), "n1")
    n2 = Node(Function("x", "y", f), "n2")

    wf = Workflow("test", base_dir=info.cache_dir)
    wf.connect(n1, "y", n2, "x")
    wf.inputs.n1.x = 2

    cache_dir = execdir.join("cache").join("test")

    class args(object):
        graph = False
        n_procs = 1
        debug = False
        clear_cache = True
        execute = True

    frontend.execute(wf, args, info)
    assert not cache_dir.exists()

    args.debug = True
    frontend.execute(wf, args, info)
    assert cache_dir.exists()

    args.debug = False
    info.remove_cache = False
    frontend.execute(wf, args, info)
    assert cache_dir.exists()

    args.execute = False
    res = frontend.execute(wf, args, info)
    assert res is None

    args.execute = True
    fname = str(execdir.join("graph").join("workflow.dot"))
    args.graph = fname
    res = frontend.execute(wf, args, info)
    assert res == fname[:-4] + ".svg"

    args.graph = True
    args.stage = "preproc"
    res = frontend.execute(wf, args, info)
    assert res == cache_dir.join("preproc.svg")
def neck_removal_wf(usemodel):
    """Create a workflow to remove the neck.

    This workflow requires a model image (e.g. an MNI standard) and points
    on that image. The model is registered to the T1 image, and the points
    are transformed into T1 space. The inferior-most transformed point is
    used to determine the cutting plane, which is aligned with the voxel
    coordinates.

    Workflow inputs/outputs

    :param inputspec.T1: The T1 image to remove the neck from
    :param inputspec.model: The reference image to register to the T1 image
    :param inputspec.limits: Points in model roughly indicating the ideal
        cutting plane
    :return: A :py:mod:`nipype` workflow
    """
    name = 'neck_removal'
    wf = pe.Workflow(name)
    inputspec = pe.Node(IdentityInterface(['T1', 'model', 'limits']),
                        name='inputspec')
    wpoints = pe.Node(Function(input_names=['limits'],
                               output_names=['points'],
                               function=writepoints),
                      name='write_points')
    cut = pe.Node(CutImage(neckonly=True), name='cut')
    outputspec = pe.Node(IdentityInterface(['cropped']), name='outputspec')
    if usemodel:
        trpoints = _tr_points_wf()
        wf.connect([(inputspec, trpoints, [('T1', 'inputspec.T1'),
                                           ('model', 'inputspec.model')]),
                    (wpoints, trpoints, [('points', 'inputspec.points')]),
                    (trpoints, cut, [('outputspec.out_points',
                                      'points_file')])])
    else:
        wf.connect([(wpoints, cut, [('points', 'points_file')])])
    wf.connect([(inputspec, wpoints, [('limits', 'limits')]),
                (inputspec, cut, [('T1', 'in_file')]),
                (cut, outputspec, [('out_file', 'cropped')])])
    return wf
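# A hedged sketch of driving neck_removal_wf(); the file paths and the
# limit point are illustrative placeholders, and the exact structure
# expected by `limits` depends on the writepoints() helper, which is not
# shown here.
def _neck_removal_example():
    wf = neck_removal_wf(usemodel=True)
    wf.inputs.inputspec.T1 = '/data/sub-01_T1w.nii.gz'        # placeholder
    wf.inputs.inputspec.model = '/data/MNI152_T1_1mm.nii.gz'  # placeholder
    wf.inputs.inputspec.limits = [(96, 110, 25)]              # illustrative point
    wf.run()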
def embed_metadata_from_dicoms(bids_options, item_dicoms, outname,
                               outname_bids, prov_file, scaninfo, tempdirs,
                               with_prov):
    """
    Enhance sidecar information file with more information from DICOMs

    Parameters
    ----------
    bids_options
    item_dicoms
    outname
    outname_bids
    prov_file
    scaninfo
    tempdirs
    with_prov

    Returns
    -------
    """
    from nipype import Node, Function

    tmpdir = tempdirs(prefix='embedmeta')

    # We need to ensure that paths are absolute if they are relative
    item_dicoms = list(map(op.abspath, item_dicoms))

    embedfunc = Node(Function(input_names=['dcmfiles', 'niftifile',
                                           'infofile', 'bids_info'],
                              function=embed_dicom_and_nifti_metadata),
                     name='embedder')
    embedfunc.inputs.dcmfiles = item_dicoms
    embedfunc.inputs.niftifile = op.abspath(outname)
    embedfunc.inputs.infofile = op.abspath(scaninfo)
    embedfunc.inputs.bids_info = load_json(
        op.abspath(outname_bids)) if (bids_options is not None) else None
    embedfunc.base_dir = tmpdir
    cwd = os.getcwd()

    lgr.debug("Embedding into %s based on dicoms[0]=%s for nifti %s",
              scaninfo, item_dicoms[0], outname)
    try:
        if op.lexists(scaninfo):
            # TODO: handle annexed file case
            if not op.islink(scaninfo):
                set_readonly(scaninfo, False)
        res = embedfunc.run()
        set_readonly(scaninfo)
        if with_prov:
            g = res.provenance.rdf()
            g.parse(prov_file, format='turtle')
            g.serialize(prov_file, format='turtle')
            set_readonly(prov_file)
    except Exception as exc:
        lgr.error("Embedding failed: %s", str(exc))
        os.chdir(cwd)
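# A hedged sketch of calling embed_metadata_from_dicoms() above. `tempdirs`
# is assumed to be a callable that returns a fresh temporary directory for
# a given prefix (as the body above uses it); every path is a placeholder.
def _embed_metadata_example(tempdirs):
    embed_metadata_from_dicoms(
        bids_options=None,                         # skip BIDS sidecar merging
        item_dicoms=['/data/dicom/001.dcm'],       # placeholder DICOM list
        outname='/out/sub-01_T1w.nii.gz',          # placeholder NIfTI
        outname_bids='/out/sub-01_T1w.json',       # unused when bids_options is None
        prov_file=None,                            # unused when with_prov is False
        scaninfo='/out/sub-01_T1w_scaninfo.json',  # placeholder sidecar
        tempdirs=tempdirs,
        with_prov=False)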
def NodeJoinFeatures():
    node = Node(Function(function=joinFeatures,
                         input_names=["data", "prefix", "output_dir",
                                      "confName", "kindConn"],
                         output_names=["graphFeatures"]),
                name="JoinFeatures")
    return node
def run_brain_extraction_nb(image, subject_label, bids_dir):
    """
    Set up and run the nobrainer brainextraction workflow.

    Parameters
    ----------
    image : str
        Path to the image to run brain extraction on.
    subject_label : str
        Label of subject to operate on (without 'sub-').
    bids_dir : str
        Path to BIDS root directory.
    """
    brainextraction_wf = pe.Workflow('brainextraction_wf')
    inputnode = pe.Node(niu.IdentityInterface(['in_file']),
                        name='inputnode')
    brainextraction = pe.Node(Function(
        input_names=['image', 'subject_label', 'bids_dir'],
        output_names=['outfile'],
        function=brain_extraction_nb),
        name='brainextraction')
    brainextraction_wf.connect([(inputnode, brainextraction,
                                 [('in_file', 'image')])])
    inputnode.inputs.in_file = image
    brainextraction.inputs.subject_label = subject_label
    brainextraction.inputs.bids_dir = bids_dir
    brainextraction_wf.run()
def run_t2w_deface(image, t1w_deface_mask, outfile):
    """
    Set up and run the T2w defacing workflow.

    Parameters
    ----------
    image : str
        Path to image that should be defaced.
    t1w_deface_mask : str
        Path to the defaced T1w image that will be used as defacing mask.
    outfile : str
        Name of the defaced file.
    """
    from bidsonym.utils import deface_t2w

    deface_wf = pe.Workflow('deface_wf')
    inputnode = pe.Node(niu.IdentityInterface(['in_file']),
                        name='inputnode')
    flirtnode = pe.Node(FLIRT(cost_func='mutualinfo',
                              output_type="NIFTI_GZ"),
                        name='flirtnode')
    deface_t2w = pe.Node(Function(input_names=['image', 'warped_mask',
                                               'outfile'],
                                  output_names=['outfile'],
                                  function=deface_t2w),
                         name='deface_t2w')
    deface_wf.connect([(inputnode, flirtnode, [('in_file', 'reference')]),
                       (inputnode, deface_t2w, [('in_file', 'image')]),
                       (flirtnode, deface_t2w, [('out_file',
                                                 'warped_mask')])])
    inputnode.inputs.in_file = image
    flirtnode.inputs.in_file = t1w_deface_mask
    deface_t2w.inputs.outfile = outfile
    deface_wf.run()
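# A minimal usage sketch for run_t2w_deface() above: the T1w deface mask is
# registered to the T2w image with FLIRT before the mask is applied. The
# paths are placeholders, not values from the original project.
def _run_t2w_deface_example():
    run_t2w_deface(image='/data/sub-01_T2w.nii.gz',
                   t1w_deface_mask='/data/sub-01_T1w_defaced.nii.gz',
                   outfile='/out/sub-01_T2w_defaced.nii.gz')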
def NodePandasAdj2Nx():
    node = Node(Function(function=pandasAdj2Nx,
                         input_names=["df"],
                         output_names=["graph"]),
                name="Pandas2Graph")
    return node
def run_deepdefacer(image, subject_label, bids_dir):
    """
    Set up and run the deepdefacer workflow.

    Parameters
    ----------
    image : str
        Path to image that should be defaced.
    subject_label : str
        Label of subject to operate on (without 'sub-').
    bids_dir : str
        Path to BIDS root directory.
    """
    deface_wf = pe.Workflow('deface_wf')
    inputnode = pe.Node(niu.IdentityInterface(['in_file']),
                        name='inputnode')
    deepdefacer = pe.Node(Function(input_names=['image', 'subject_label',
                                                'bids_dir'],
                                   output_names=['outfile'],
                                   function=deepdefacer_cmd),
                          name='deepdefacer')
    deface_wf.connect([(inputnode, deepdefacer, [('in_file', 'image')])])
    inputnode.inputs.in_file = image
    deepdefacer.inputs.subject_label = subject_label
    deepdefacer.inputs.bids_dir = bids_dir
    deface_wf.run()
def test_io_subclass():
    """Ensure any io subclass allows dynamic traits"""
    from nipype.interfaces.io import IOBase
    from nipype.interfaces.base import DynamicTraitedSpec

    class TestKV(IOBase):
        _always_run = True
        output_spec = DynamicTraitedSpec

        def _list_outputs(self):
            outputs = {}
            outputs['test'] = 1
            outputs['foo'] = 'bar'
            return outputs

    wf = pe.Workflow('testkv')

    def testx2(test):
        return test * 2

    kvnode = pe.Node(TestKV(), name='testkv')
    from nipype.interfaces.utility import Function
    func = pe.Node(Function(input_names=['test'],
                            output_names=['test2'],
                            function=testx2),
                   name='func')
    exception_not_raised = True
    try:
        wf.connect(kvnode, 'test', func, 'test')
    except Exception as e:
        if 'Module testkv has no output called test' in str(e):
            exception_not_raised = False
    assert exception_not_raised
def test_serial_input(tmpdir):
    tmpdir.chdir()
    wd = os.getcwd()
    from nipype import MapNode, Function, Workflow

    def func1(in1):
        return in1

    n1 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 name='n1')
    n1.inputs.in1 = [1, 2, 3]

    w1 = Workflow(name='test')
    w1.base_dir = wd
    w1.add_nodes([n1])
    # set local check
    w1.config['execution'] = {'stop_on_first_crash': 'true',
                              'local_hash_check': 'true',
                              'crashdump_dir': wd,
                              'poll_sleep_duration': 2}

    # test output of num_subnodes method when serial is default (False)
    assert n1.num_subnodes() == len(n1.inputs.in1)

    # test running the workflow on default conditions
    w1.run(plugin='MultiProc')

    # test output of num_subnodes method when serial is True
    n1._serial = True
    assert n1.num_subnodes() == 1

    # test running the workflow on serial conditions
    w1.run(plugin='MultiProc')
def bids(self):
    from bids_conversion import create_bids
    if not getattr(self, '_bids', None):
        self._bids = pe.Node(name="bids",
                             interface=Function(
                                 input_names=["dicom_info", "bids_info",
                                              "bids_output", "subj", "ses"],
                                 output_names=["out"],
                                 function=create_bids))
        self._bids.inputs.ses = 'bl'
    return self._bids
def embed_metadata_from_dicoms(bids, item_dicoms, outname, outname_bids,
                               prov_file, scaninfo, tempdirs, with_prov,
                               min_meta):
    """
    Enhance sidecar information file with more information from DICOMs

    Parameters
    ----------
    bids
    item_dicoms
    outname
    outname_bids
    prov_file
    scaninfo
    tempdirs
    with_prov
    min_meta

    Returns
    -------
    """
    from nipype import Node, Function

    tmpdir = tempdirs(prefix='embedmeta')

    embedfunc = Node(Function(input_names=['dcmfiles', 'niftifile',
                                           'infofile', 'bids_info',
                                           'force', 'min_meta'],
                              output_names=['outfile', 'meta'],
                              function=embed_nifti),
                     name='embedder')
    embedfunc.inputs.dcmfiles = item_dicoms
    embedfunc.inputs.niftifile = op.abspath(outname)
    embedfunc.inputs.infofile = op.abspath(scaninfo)
    embedfunc.inputs.min_meta = min_meta
    if bids:
        embedfunc.inputs.bids_info = load_json(op.abspath(outname_bids))
    else:
        embedfunc.inputs.bids_info = None
    embedfunc.inputs.force = True
    embedfunc.base_dir = tmpdir
    cwd = os.getcwd()
    try:
        if op.lexists(scaninfo):
            # TODO: handle annexed file case
            if not op.islink(scaninfo):
                set_readonly(scaninfo, False)
        res = embedfunc.run()
        set_readonly(scaninfo)
        if with_prov:
            g = res.provenance.rdf()
            g.parse(prov_file, format='turtle')
            g.serialize(prov_file, format='turtle')
            set_readonly(prov_file)
    except Exception as exc:
        lgr.error("Embedding failed: %s", str(exc))
        os.chdir(cwd)
def test_serial_input():
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    from nipype import MapNode, Function, Workflow

    def func1(in1):
        return in1

    n1 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 name='n1')
    n1.inputs.in1 = [1, 2, 3]

    w1 = Workflow(name='test')
    w1.base_dir = wd
    w1.add_nodes([n1])
    # set local check
    w1.config['execution'] = {'stop_on_first_crash': 'true',
                              'local_hash_check': 'true',
                              'crashdump_dir': wd,
                              'poll_sleep_duration': 2}

    # test output of num_subnodes method when serial is default (False)
    yield assert_equal, n1.num_subnodes(), len(n1.inputs.in1)

    # test running the workflow on default conditions
    error_raised = False
    try:
        w1.run(plugin='MultiProc')
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        error_raised = True
    yield assert_false, error_raised

    # test output of num_subnodes method when serial is True
    n1._serial = True
    yield assert_equal, n1.num_subnodes(), 1

    # test running the workflow on serial conditions
    error_raised = False
    try:
        w1.run(plugin='MultiProc')
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        error_raised = True
    yield assert_false, error_raised

    os.chdir(cwd)
    rmtree(wd)
def get_time_series_extractor_node():
    return Node(Function(function=extract_time_series,
                         input_names=["lh_surf", "rh_surf", "lh_annot",
                                      "rh_annot", "output_dir", "prefix",
                                      "confoundsName"],
                         output_names=["time_series", "roiLabels",
                                       "confName"]),
                name="SurfaceTimeSeriesExtractor")
def computed_avg_node(node_name, nnodes, work_dir, chunk=None, delay=0,
                      benchmark_dir=None, benchmark=False, cli=False,
                      avg=None):
    files = get_partitions(chunk, nnodes)

    if delay is None:
        delay = 0

    ca_name = 'ca1_{0}'.format(node_name)
    ca2_name = 'ca2_{0}'.format(node_name)

    ca_1 = MapNode(Function(input_names=['chunk', 'delay', 'benchmark',
                                         'benchmark_dir', 'cli', 'wf_name',
                                         'avg', 'work_dir'],
                            output_names=['inc_chunk'],
                            function=increment_wf),
                   name=ca_name,
                   iterfield='chunk')
    ca_1.inputs.chunk = files
    ca_1.inputs.delay = delay
    ca_1.inputs.benchmark = benchmark
    ca_1.inputs.benchmark_dir = benchmark_dir
    ca_1.inputs.cli = cli
    ca_1.inputs.wf_name = 'incwf_{}'.format(ca_name)
    ca_1.inputs.avg = avg
    ca_1.inputs.work_dir = work_dir

    ca_2 = Node(Function(input_names=['chunks', 'benchmark',
                                      'benchmark_dir'],
                         output_names=['avg_chunk'],
                         function=compute_avg),
                name=ca2_name)
    ca_2.inputs.benchmark = benchmark
    ca_2.inputs.benchmark_dir = benchmark_dir

    return ca_1, ca_2
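# A hedged sketch of wiring the two nodes returned by computed_avg_node()
# above into a workflow. The connection field names ('inc_chunk' ->
# 'chunks') follow the node definitions; the workflow name, chunk paths,
# and work directory are placeholders.
def _computed_avg_example():
    from nipype import Workflow
    ca_1, ca_2 = computed_avg_node('demo', nnodes=2,
                                   work_dir='/tmp/work',               # placeholder
                                   chunk=['/tmp/chunk0', '/tmp/chunk1'])  # placeholder chunks
    wf = Workflow(name='avg_wf', base_dir='/tmp/work')
    # fan the incremented chunks from the MapNode into the averaging node
    wf.connect(ca_1, 'inc_chunk', ca_2, 'chunks')
    wf.run()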
def test_mapnode_iterfield_type(x_inp, f_exp):
    from nipype import MapNode, Function

    def double_func(x):
        return 2 * x

    double = Function(["x"], ["f_x"], double_func)

    double_node = MapNode(double, name="double", iterfield=["x"])
    double_node.inputs.x = x_inp

    res = double_node.run()
    assert res.outputs.f_x == f_exp
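# A hedged, standalone illustration of the coercion the parametrized test
# above checks: MapNode accepts any iterable (and promotes a scalar) for an
# iterfield, and the mapped outputs always come back as a list. The input
# values here are illustrative.
def _mapnode_iterfield_demo():
    from nipype import MapNode, Function

    def double_func(x):
        return 2 * x

    node = MapNode(Function(["x"], ["f_x"], double_func),
                   name="double", iterfield=["x"])
    node.inputs.x = (2, 3)            # a tuple input...
    res = node.run()
    assert res.outputs.f_x == [4, 6]  # ...is coerced to a list of results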
def sum2args():
    """ Return a nipype function that sums up two args: `arg1` and `arg2`
    and leaves the result in `out`.

    Returns
    -------
    fi: nipype.interfaces.utility.Function
    """
    func = 'def func(arg1, arg2): return arg1 + arg2'
    fi = Function(input_names=['arg1', 'arg2'], output_names=['out'])
    fi.inputs.function_str = func
    return fi
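# A minimal usage sketch for sum2args() above, wrapping the returned
# Function interface in a Node; the node name 'adder' is illustrative.
def _sum2args_example():
    from nipype import Node
    adder = Node(sum2args(), name='adder')
    adder.inputs.arg1 = 3
    adder.inputs.arg2 = 4
    res = adder.run()
    assert res.outputs.out == 7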
def run_mri_deface(image, outfile):
    deface_wf = pe.Workflow('deface_wf')
    inputnode = pe.Node(niu.IdentityInterface(['in_file']),
                        name='inputnode')
    mri_deface = pe.Node(Function(input_names=['image', 'outfile'],
                                  output_names=['outfile'],
                                  function=mri_deface_cmd),
                         name='mri_deface')
    deface_wf.connect([(inputnode, mri_deface, [('in_file', 'image')])])
    inputnode.inputs.in_file = image
    mri_deface.inputs.outfile = outfile
    deface_wf.run()
def run_tbss_non_FA(tbss_info_dict, maps, subjects_list, output_dir,
                    output_name='non_fa', recalculate=True):
    """Run TBSS non-FA on the given subjects.

    Args:
        tbss_info_dict (dict): the information dict from 'run_tbss()'.
        maps (dict): mapping subjects to filenames containing the map to
            register to the FA skeleton.
        subjects_list (list of str): the list of subject names. We provide
            these to make sure the images are all analyzed in the correct
            order.
        output_dir (str): the output directory
        output_name (str): the name of the output file (without extension)
        recalculate (boolean): if False and the output file already exists,
            return it instead of recomputing.

    Returns:
        The full path to the output file containing the tracts.
    """
    work_dir = os.path.join(output_dir, '_nipype_work_dir',
                            output_name.replace('.', '_'))
    output_file = os.path.join(output_dir, output_name + '.nii.gz')

    if not recalculate and os.path.isfile(output_file):
        return output_file

    maps_list = [maps[subject] for subject in subjects_list]
    field_list = [tbss_info_dict['field_list'][subject]
                  for subject in subjects_list]

    tbss_non_fa = create_tbss_non_FA(output_file=output_file)
    tbss_non_fa.base_dir = work_dir
    tbss_non_fa.inputs.inputnode.file_list = maps_list
    tbss_non_fa.inputs.inputnode.field_list = field_list
    tbss_non_fa.inputs.inputnode.skeleton_thresh = 0.2
    tbss_non_fa.inputs.inputnode.groupmask = tbss_info_dict['group_mask']
    tbss_non_fa.inputs.inputnode.meanfa_file = tbss_info_dict['mean_fa']
    tbss_non_fa.inputs.inputnode.distance_map = tbss_info_dict['distance_map']
    tbss_non_fa.inputs.inputnode.all_FA_file = tbss_info_dict['merge_fa']

    data_sink = pe.Node(Function(['input'], [], function=void), 'DataSink')

    wf = Workflow(name='tbss_non_fa_wf', base_dir=work_dir)
    wf.connect([(tbss_non_fa, data_sink,
                 [('outputnode.projected_nonFA_file', 'input')])])
    wf.run(plugin='MultiProc')

    shutil.rmtree(work_dir)
    return output_file
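# A hedged sketch of the inputs run_tbss_non_FA() above expects, inferred
# from the dict keys read in its body; every path and subject name is a
# placeholder.
def _run_tbss_non_fa_example():
    tbss_info_dict = {
        'field_list': {'s01': '/tbss/s01_field.nii.gz'},
        'group_mask': '/tbss/group_mask.nii.gz',
        'mean_fa': '/tbss/mean_FA.nii.gz',
        'distance_map': '/tbss/distance_map.nii.gz',
        'merge_fa': '/tbss/all_FA.nii.gz',
    }
    maps = {'s01': '/maps/s01_MD.nii.gz'}
    return run_tbss_non_FA(tbss_info_dict, maps, ['s01'], '/out',
                           output_name='md')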
def run_mridefacer(image, subject_label):
    deface_wf = pe.Workflow('deface_wf')
    inputnode = pe.Node(niu.IdentityInterface(['in_file']),
                        name='inputnode')
    mridefacer = pe.Node(Function(input_names=['image', 'subject_label'],
                                  output_names=['outfile'],
                                  function=mri_deface_cmd),
                         name='mridefacer')
    # connect through the workflow, not the node itself
    deface_wf.connect([
        (inputnode, mridefacer, [('in_file', 'image')]),
    ])
    inputnode.inputs.in_file = image
    mridefacer.inputs.subject_label = subject_label
    deface_wf.run()