def test_mapnode_nested():
    """MapNode with nested=True must map over nested iterfield lists;
    with nested=False the same nested input must raise at run time.

    Nose-style generator test: each ``yield`` is one assertion.
    """
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    from nipype import MapNode, Function

    def func1(in1):
        return in1 + 1

    n1 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 nested=True,
                 name='n1')
    n1.inputs.in1 = [[1, [2]], 3, [4, 5]]
    n1.run()
    print(n1.get_output('out'))
    # nested=True preserves the input nesting in the output
    yield assert_equal, n1.get_output('out'), [[2, [3]], 4, [5, 6]]

    # Fixed copy-paste bug: this node previously reused name='n1', which
    # collides with the first node's name and working directory.
    n2 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 nested=False,
                 name='n2')
    n2.inputs.in1 = [[1, [2]], 3, [4, 5]]

    error_raised = False
    try:
        n2.run()
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        error_raised = True
    # nested input with nested=False is invalid and must fail
    yield assert_true, error_raised

    # Clean up (previously missing): restore the original cwd and remove
    # the temporary working directory, matching test_serial_input below.
    os.chdir(cwd)
    rmtree(wd)
def test_old_config(tmpdir):
    """A workflow configured via the legacy crashdump_dir entry runs cleanly."""
    work_dir = str(tmpdir)
    os.chdir(work_dir)
    from nipype.interfaces.utility import Function

    def func1():
        return 1

    def func2(a):
        return a + 1

    def modify(x):
        return x + 1

    src = pe.Node(Function(input_names=[], output_names=['a'],
                           function=func1), name='n1')
    sink = pe.Node(Function(input_names=['a'], output_names=['b'],
                            function=func2), name='n2')
    src.inputs.a = 1

    wf = pe.Workflow(name='test')
    wf.connect(src, ('a', modify), sink, 'a')
    wf.base_dir = work_dir
    wf.config['execution']['crashdump_dir'] = work_dir

    # generate outputs — the run should not raise
    raised = False
    try:
        wf.run(plugin='Linear')
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        raised = True
    assert not raised
def test_node_hash(tmpdir):
    """With local_hash_check enabled, cached nodes are never submitted to the
    plugin; with it disabled, submission happens (and here deliberately fails)."""
    work_dir = str(tmpdir)
    os.chdir(work_dir)
    from nipype.interfaces.utility import Function

    def func1():
        return 1

    def func2(a):
        return a + 1

    def modify(x):
        return x + 1

    src = pe.Node(Function(input_names=[], output_names=['a'],
                           function=func1), name='n1')
    sink = pe.Node(Function(input_names=['a'], output_names=['b'],
                            function=func2), name='n2')
    src.inputs.a = 1

    wf = pe.Workflow(name='test')
    wf.connect(src, ('a', modify), sink, 'a')
    wf.base_dir = work_dir
    # generate outputs
    wf.run(plugin='Linear')

    # ensure plugin is being called
    wf.config['execution'] = {
        'stop_on_first_crash': 'true',
        'local_hash_check': 'false',
        'crashdump_dir': work_dir,
    }

    # create dummy distributed plugin class that blows up on any submission
    from nipype.pipeline.plugins.base import DistributedPluginBase

    class RaiseError(DistributedPluginBase):
        def _submit_job(self, node, updatehash=False):
            raise Exception('Submit called')

    raised = False
    try:
        wf.run(plugin=RaiseError())
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        raised = True
    assert raised

    # rerun to ensure we have outputs
    wf.run(plugin='Linear')

    # set local check — cached results should now bypass submission entirely
    wf.config['execution'] = {
        'stop_on_first_crash': 'true',
        'local_hash_check': 'true',
        'crashdump_dir': work_dir,
    }
    raised = False
    try:
        wf.run(plugin=RaiseError())
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        raised = True
    assert not raised
def test_serial_input():
    """num_subnodes reflects the _serial flag, and the workflow runs both ways.

    Nose-style generator test: each ``yield`` is one assertion.
    """
    orig_dir = os.getcwd()
    work_dir = mkdtemp()
    os.chdir(work_dir)
    from nipype import MapNode, Function, Workflow

    def func1(in1):
        return in1

    mapnode = MapNode(Function(input_names=['in1'],
                               output_names=['out'],
                               function=func1),
                      iterfield=['in1'],
                      name='n1')
    mapnode.inputs.in1 = [1, 2, 3]

    wf = Workflow(name='test')
    wf.base_dir = work_dir
    wf.add_nodes([mapnode])
    # set local check
    wf.config['execution'] = {
        'stop_on_first_crash': 'true',
        'local_hash_check': 'true',
        'crashdump_dir': work_dir,
        'poll_sleep_duration': 2,
    }

    # test output of num_subnodes method when serial is default (False)
    yield assert_equal, mapnode.num_subnodes(), len(mapnode.inputs.in1)

    # test running the workflow on default conditions
    failed = False
    try:
        wf.run(plugin='MultiProc')
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        failed = True
    yield assert_false, failed

    # test output of num_subnodes method when serial is True
    mapnode._serial = True
    yield assert_equal, mapnode.num_subnodes(), 1

    # test running the workflow on serial conditions
    failed = False
    try:
        wf.run(plugin='MultiProc')
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        failed = True
    yield assert_false, failed

    os.chdir(orig_dir)
    rmtree(work_dir)
# NOTE(review): this is an exact duplicate of the test_serial_input defined
# above — this second definition shadows the first, so only this copy is ever
# collected and run. Confirm whether one of the two should be removed or
# renamed to a distinct test name.
def test_serial_input():
    """num_subnodes honors the _serial flag, and the workflow runs either way.

    Nose-style generator test: each ``yield`` is one assertion.
    """
    # Work inside a throwaway temp dir; restored and removed at the end.
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    from nipype import MapNode, Function, Workflow

    def func1(in1):
        # identity: the MapNode just passes each element through
        return in1

    n1 = MapNode(Function(input_names=['in1'], output_names=['out'], function=func1), iterfield=['in1'], name='n1')
    n1.inputs.in1 = [1, 2, 3]
    w1 = Workflow(name='test')
    w1.base_dir = wd
    w1.add_nodes([n1])
    # set local check
    w1.config['execution'] = {'stop_on_first_crash': 'true', 'local_hash_check': 'true', 'crashdump_dir': wd, 'poll_sleep_duration': 2}
    # test output of num_subnodes method when serial is default (False)
    yield assert_equal, n1.num_subnodes(), len(n1.inputs.in1)
    # test running the workflow on default conditions
    error_raised = False
    try:
        w1.run(plugin='MultiProc')
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        error_raised = True
    yield assert_false, error_raised
    # test output of num_subnodes method when serial is True
    n1._serial = True
    yield assert_equal, n1.num_subnodes(), 1
    # test running the workflow on serial conditions
    error_raised = False
    try:
        w1.run(plugin='MultiProc')
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        error_raised = True
    yield assert_false, error_raised
    # Restore original cwd and delete the temp working directory.
    os.chdir(cwd)
    rmtree(wd)