def test_unique_join_node():
    """Test join with the ``unique`` flag set to True."""
    global _sum_operands
    # reset the module-level accumulator written by SumInterface runs
    _sum_operands = []
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    # Make the workflow.
    wf = pe.Workflow(name='test')
    # the iterated input node; the values deliberately contain duplicates
    inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec')
    inputspec.iterables = [('n', [3, 1, 2, 1, 3])]
    # a pre-join node in the iterated path
    pre_join1 = pe.Node(IncrementInterface(), name='pre_join1')
    wf.connect(inputspec, 'n', pre_join1, 'input1')
    # the set join node
    join = pe.JoinNode(SumInterface(), joinsource='inputspec',
                       joinfield='input1', unique=True, name='join')
    wf.connect(pre_join1, 'output1', join, 'input1')
    wf.run()
    # unique=True drops the duplicate incremented values while
    # preserving first-occurrence order: [4, 2, 3]
    assert_equal(_sum_operands[0], [4, 2, 3],
                 "The unique join output value is incorrect: %s." % _sum_operands[0])
    os.chdir(cwd)
    rmtree(wd)
def test_mlab_init():
    """Smoke-test MatlabCommand class defaults and the matlab_cmd override."""
    # yield (callable, args) tuples so nose invokes each assertion;
    # the previous ``yield assert_equal(...)`` form ran the check during
    # generation and then yielded None, which nose cannot call as a test
    yield assert_equal, mlab.MatlabCommand._cmd, 'matlab'
    yield assert_equal, mlab.MatlabCommand.input_spec, mlab.MatlabInputSpec
    yield assert_equal, mlab.MatlabCommand().cmd, matlab_cmd
    mc = mlab.MatlabCommand(matlab_cmd='foo_m')
    yield assert_equal, mc.cmd, 'foo_m'
def test_set_join_node():
    """Test collecting join inputs to a set."""
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    # Make the workflow.
    wf = pe.Workflow(name='test')
    # the iterated input node; the values contain duplicates (1 and 2 twice)
    inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec')
    inputspec.iterables = [('n', [1, 2, 1, 3, 2])]
    # a pre-join node in the iterated path
    pre_join1 = pe.Node(IncrementInterface(), name='pre_join1')
    wf.connect(inputspec, 'n', pre_join1, 'input1')
    # the set join node
    join = pe.JoinNode(SetInterface(), joinsource='inputspec',
                       joinfield='input1', name='join')
    wf.connect(pre_join1, 'output1', join, 'input1')
    wf.run()
    # the join length is the number of unique inputs
    assert_equal(_set_len, 3,
                 "The join Set output value is incorrect: %s." % _set_len)
    os.chdir(cwd)
    rmtree(wd)
def test1():
    """A workflow with one non-iterable node expands to one node and no edges."""
    pipe = pe.Workflow(name='pipe')
    mod1 = pe.Node(interface=TestInterface(), name='mod1')
    pipe.add_nodes([mod1])
    pipe._create_flat_graph()
    pipe._execgraph = pe._generate_expanded_graph(deepcopy(pipe._flatgraph))
    # yield (callable, args) tuples so nose invokes each assertion;
    # ``yield assert_equal(...)`` would yield None, which nose cannot call
    yield assert_equal, len(pipe._execgraph.nodes()), 1
    yield assert_equal, len(pipe._execgraph.edges()), 0
def _run_interface(self, runtime):
    """Assert that the two input volumes contain identical image data.

    Loads ``volume1`` and ``volume2`` with nibabel and compares their
    data arrays, then returns the runtime object unchanged.

    NOTE(review): assumes ``assert_equal`` supports elementwise array
    comparison (e.g. ``numpy.testing.assert_equal``); nose's
    ``assert_equal`` would raise on the ambiguous truth value of an
    array equality result -- confirm which helper is imported.
    """
    data1 = nb.load(self.inputs.volume1).get_data()
    data2 = nb.load(self.inputs.volume2).get_data()
    assert_equal(data1, data2)
    return runtime
def test_itersource_join_source_node():
    """Test join on an input node which has an ``itersource``."""
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    # Make the workflow.
    wf = pe.Workflow(name='test')
    # the iterated input node
    inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec')
    inputspec.iterables = [('n', [1, 2])]
    # an intermediate node in the first iteration path
    pre_join1 = pe.Node(IncrementInterface(), name='pre_join1')
    wf.connect(inputspec, 'n', pre_join1, 'input1')
    # an iterable pre-join node with an itersource: the iterables values
    # are selected per upstream 'n' value via the dict keys
    pre_join2 = pe.Node(ProductInterface(), name='pre_join2')
    pre_join2.itersource = ('inputspec', 'n')
    pre_join2.iterables = ('input1', {1: [3, 4], 2: [5, 6]})
    wf.connect(pre_join1, 'output1', pre_join2, 'input2')
    # an intermediate node in the second iteration path
    pre_join3 = pe.Node(IncrementInterface(), name='pre_join3')
    wf.connect(pre_join2, 'output1', pre_join3, 'input1')
    # the join node, joining over the itersource-bearing node
    join = pe.JoinNode(IdentityInterface(fields=['vector']),
                       joinsource='pre_join2', joinfield='vector',
                       name='join')
    wf.connect(pre_join3, 'output1', join, 'vector')
    # a join successor node
    post_join1 = pe.Node(SumInterface(), name='post_join1')
    wf.connect(join, 'vector', post_join1, 'input1')
    result = wf.run()
    # the expanded graph contains
    # 1 pre_join1 replicate for each inputspec iteration,
    # 2 pre_join2 replicates for each inputspec iteration,
    # 1 pre_join3 for each pre_join2 iteration,
    # 1 join replicate for each inputspec iteration and
    # 1 post_join1 replicate for each join replicate =
    # 2 + (2 * 2) + 4 + 2 + 2 = 14 expansion graph nodes.
    # Nipype factors away the iterable input
    # IdentityInterface but keeps the join IdentityInterface.
    assert_equal(len(result.nodes()), 14,
                 "The number of expanded nodes is incorrect.")
    # The first join inputs are:
    # 1 + (3 * 2) and 1 + (4 * 2)
    # The second join inputs are:
    # 1 + (5 * 3) and 1 + (6 * 3)
    # the post-join nodes execution order is indeterminate;
    # therefore, compare the lists item-wise.
    assert_true([16, 19] in _sum_operands,
                "The join Sum input is incorrect: %s." % _sum_operands)
    assert_true([7, 9] in _sum_operands,
                "The join Sum input is incorrect: %s." % _sum_operands)
    os.chdir(cwd)
    rmtree(wd)
def test_multiple_join_nodes():
    """Test two join nodes, one downstream of the other."""
    global _products
    # reset the module-level accumulator written by ProductInterface runs
    _products = []
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    # Make the workflow.
    wf = pe.Workflow(name='test')
    # the iterated input node
    inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec')
    inputspec.iterables = [('n', [1, 2, 3])]
    # a pre-join node in the iterated path
    pre_join1 = pe.Node(IncrementInterface(), name='pre_join1')
    wf.connect(inputspec, 'n', pre_join1, 'input1')
    # the first join node
    join1 = pe.JoinNode(IdentityInterface(fields=['vector']),
                        joinsource='inputspec', joinfield='vector',
                        name='join1')
    wf.connect(pre_join1, 'output1', join1, 'vector')
    # an uniterated post-join node
    post_join1 = pe.Node(SumInterface(), name='post_join1')
    wf.connect(join1, 'vector', post_join1, 'input1')
    # the downstream join node connected to both an upstream join
    # path output and a separate input in the iterated path
    join2 = pe.JoinNode(IdentityInterface(fields=['vector', 'scalar']),
                        joinsource='inputspec', joinfield='vector',
                        name='join2')
    wf.connect(pre_join1, 'output1', join2, 'vector')
    wf.connect(post_join1, 'output1', join2, 'scalar')
    # a second post-join node
    post_join2 = pe.Node(SumInterface(), name='post_join2')
    wf.connect(join2, 'vector', post_join2, 'input1')
    # a third post-join node
    post_join3 = pe.Node(ProductInterface(), name='post_join3')
    wf.connect(post_join2, 'output1', post_join3, 'input1')
    wf.connect(join2, 'scalar', post_join3, 'input2')
    result = wf.run()
    # The expanded graph contains one pre_join1 replicate per inputspec
    # replicate and one of each remaining node = 3 + 5 = 8 nodes.
    # The replicated inputspec nodes are factored out of the expansion.
    assert_equal(len(result.nodes()), 8,
                 "The number of expanded nodes is incorrect.")
    # The outputs are:
    # pre_join1: [2, 3, 4]
    # post_join1: 9
    # join2: [2, 3, 4] and 9
    # post_join2: 9
    # post_join3: 9 * 9 = 81
    assert_equal(_products, [81], "The post-join product is incorrect")
    os.chdir(cwd)
    rmtree(wd)
def test2():
    """A node with two 2-value iterables expands to 2 x 2 = 4 isolated nodes."""
    pipe = pe.Workflow(name='pipe')
    mod1 = pe.Node(interface=TestInterface(), name='mod1')
    mod1.iterables = dict(input1=lambda: [1, 2], input2=lambda: [1, 2])
    pipe.add_nodes([mod1])
    pipe._create_flat_graph()
    pipe._execgraph = pe._generate_expanded_graph(deepcopy(pipe._flatgraph))
    # yield (callable, args) tuples so nose invokes each assertion;
    # ``yield assert_equal(...)`` would yield None, which nose cannot call
    yield assert_equal, len(pipe._execgraph.nodes()), 4
    yield assert_equal, len(pipe._execgraph.edges()), 0
def test5():
    """Two chained nodes, each with a 2-value iterable: 6 nodes and 4 edges."""
    pipe = pe.Workflow(name='pipe')
    mod1 = pe.Node(interface=TestInterface(), name='mod1')
    mod2 = pe.Node(interface=TestInterface(), name='mod2')
    mod1.iterables = dict(input1=lambda: [1, 2])
    mod2.iterables = dict(input1=lambda: [1, 2])
    pipe.connect([(mod1, mod2, [('output1', 'input2')])])
    pipe._create_flat_graph()
    pipe._execgraph = pe._generate_expanded_graph(deepcopy(pipe._flatgraph))
    # yield (callable, args) tuples so nose invokes each assertion;
    # ``yield assert_equal(...)`` would yield None, which nose cannot call
    yield assert_equal, len(pipe._execgraph.nodes()), 6
    yield assert_equal, len(pipe._execgraph.edges()), 4
def test_callback_multiproc_normal():
    """The MultiProc plugin reports a 'start' and an 'end' status for a node."""
    so = Status()
    # use a private temporary directory instead of the shared, never
    # cleaned-up '/tmp', and remove it when the test finishes
    wf = pe.Workflow(name='test', base_dir=mkdtemp())
    f_node = pe.Node(niu.Function(function=func, input_names=[],
                                  output_names=[]),
                     name='f_node')
    wf.add_nodes([f_node])
    wf.run(plugin='MultiProc', plugin_args={'status_callback': so.callback})
    # exactly one start/end pair was recorded
    assert_equal(len(so.statuses), 2)
    for (n, s) in so.statuses:
        yield assert_equal, n.name, 'f_node'
    yield assert_equal, so.statuses[0][1], 'start'
    yield assert_equal, so.statuses[1][1], 'end'
    rmtree(wf.base_dir)
def test_split_filename():
    """split_filename separates directory, base name, and compound extension."""
    # yield (callable, args) tuples so nose invokes each assertion;
    # ``yield assert_equal(...)`` would yield None, which nose cannot call
    res = split_filename('foo.nii')
    yield assert_equal, res, ('', 'foo', '.nii')
    # multi-part extensions are kept together
    res = split_filename('foo.nii.gz')
    yield assert_equal, res, ('', 'foo', '.nii.gz')
    res = split_filename('/usr/local/foo.nii.gz')
    yield assert_equal, res, ('/usr/local', 'foo', '.nii.gz')
    # relative paths are preserved
    res = split_filename('../usr/local/foo.nii')
    yield assert_equal, res, ('../usr/local', 'foo', '.nii')
    # arbitrarily long dotted extensions
    res = split_filename('/usr/local/foo.a.b.c.d')
    yield assert_equal, res, ('/usr/local', 'foo', '.a.b.c.d')
def test_cmdline():
    """The generated matlab command line embeds the script with error trapping."""
    basedir = mkdtemp()
    mi = mlab.MatlabCommand(script='whos', script_file='testscript')
    # yield (callable, args) tuples so nose invokes each assertion;
    # ``yield assert_equal(...)`` would yield None, which nose cannot call
    yield (assert_equal, mi.cmdline,
           matlab_cmd + ' -nodesktop -nosplash -singleCompThread -r "fprintf(1,\'Executing code at %s:\\n\',datestr(now));ver,try,whos,catch ME,ME,ME.stack,fprintf(\'%s\\n\',ME.message);fprintf(2,\'<MatlabScriptException>\');fprintf(2,\'%s\\n\',ME.message);fprintf(2,\'File:%s\\nName:%s\\nLine:%d\\n\',ME.stack.file,ME.stack.name,ME.stack.line);fprintf(2,\'</MatlabScriptException>\');end;;exit"')
    yield assert_equal, mi.inputs.script, 'whos'
    yield assert_equal, mi.inputs.script_file, 'testscript'
    # building the cmdline must not write a .m file when mfile is off
    path_exists = os.path.exists(os.path.join(basedir, 'testscript.m'))
    yield assert_false, path_exists
    rmtree(basedir)
def test_generate_dependency_list():
    """_generate_dependency_list populates procs, the state flags and depidx."""
    pipe = pe.Workflow(name='pipe')
    mod1 = pe.Node(interface=TestInterface(), name='mod1')
    mod2 = pe.Node(interface=TestInterface(), name='mod2')
    pipe.connect([(mod1, mod2, [('output1', 'input1')])])
    pipe._create_flat_graph()
    pipe._execgraph = pe._generate_expanded_graph(deepcopy(pipe._flatgraph))
    pipe._generate_dependency_list()
    # yield (callable, args) tuples so nose invokes each assertion;
    # ``yield assert_false(...)`` would yield None, which nose cannot call.
    # Identity test replaces the unidiomatic '== None' comparison.
    yield assert_false, pipe._execgraph is None
    yield assert_equal, len(pipe.procs), 2
    # the downstream proc is neither done nor pending before execution
    yield assert_false, pipe.proc_done[1]
    yield assert_false, pipe.proc_pending[1]
    # the dependency matrix records the mod1 -> mod2 edge
    yield assert_equal, pipe.depidx[0, 1], 1
def test_run_interface():
    """MatlabCommand validates its inputs and reports the process return code."""
    mc = mlab.MatlabCommand(matlab_cmd='foo_m')
    # yield (callable, args) tuples so nose invokes each assertion;
    # ``yield assert_raises(...)`` would yield None, which nose cannot call
    yield assert_raises, ValueError, mc.run  # script is mandatory
    mc.inputs.script = 'a=1;'
    yield assert_raises, IOError, mc.run  # foo_m is not an executable
    cwd = os.getcwd()
    basedir = mkdtemp()
    os.chdir(basedir)
    # mfile=True bypasses the ubuntu dash issue
    res = mlab.MatlabCommand(script='foo', paths=[basedir], mfile=True).run()
    yield assert_equal, res.runtime.returncode, 1
    res = mlab.MatlabCommand(script='a=1;', paths=[basedir], mfile=True).run()
    yield assert_equal, res.runtime.returncode, 0
    os.chdir(cwd)
    rmtree(basedir)
def test_callback_exception():
    """A failing node reports 'start' then 'exception' via the status callback."""
    so = Status()
    # use a private temporary directory instead of the shared, never
    # cleaned-up '/tmp', and remove it when the test finishes
    wf = pe.Workflow(name='test', base_dir=mkdtemp())
    f_node = pe.Node(niu.Function(function=bad_func, input_names=[],
                                  output_names=[]),
                     name='f_node')
    wf.add_nodes([f_node])
    try:
        wf.run(plugin_args={'status_callback': so.callback})
    except Exception:
        # the run is expected to fail; only the callback record is checked.
        # Catch Exception rather than a bare except so KeyboardInterrupt
        # and SystemExit still propagate.
        pass
    assert_equal(len(so.statuses), 2)
    for (n, s) in so.statuses:
        yield assert_equal, n.name, 'f_node'
    yield assert_equal, so.statuses[0][1], 'start'
    yield assert_equal, so.statuses[1][1], 'exception'
    rmtree(wf.base_dir)
def test_callback_normal():
    """A successful Linear run emits a 'start' followed by an 'end' status."""
    status = Status()
    workflow = pe.Workflow(name='test', base_dir=mkdtemp())
    node = pe.Node(
        niu.Function(function=func, input_names=[], output_names=[]),
        name='f_node')
    workflow.add_nodes([node])
    workflow.config['execution'] = {'crashdump_dir': workflow.base_dir}
    workflow.run(plugin="Linear",
                 plugin_args={'status_callback': status.callback})
    # exactly one start/end pair was recorded, both for our node
    assert_equal(len(status.statuses), 2)
    for recorded_node, _ in status.statuses:
        yield assert_equal, recorded_node.name, 'f_node'
    yield assert_equal, status.statuses[0][1], 'start'
    yield assert_equal, status.statuses[1][1], 'end'
    rmtree(workflow.base_dir)
def test_callback_multiproc_normal():
    """A successful MultiProc run emits a 'start' followed by an 'end' status."""
    status = Status()
    workflow = pe.Workflow(name='test', base_dir=mkdtemp())
    node = pe.Node(
        niu.Function(function=func, input_names=[], output_names=[]),
        name='f_node')
    workflow.add_nodes([node])
    workflow.config['execution'] = {'crashdump_dir': workflow.base_dir}
    workflow.run(plugin='MultiProc',
                 plugin_args={'status_callback': status.callback})
    # exactly one start/end pair was recorded, both for our node
    assert_equal(len(status.statuses), 2)
    for recorded_node, _ in status.statuses:
        yield assert_equal, recorded_node.name, 'f_node'
    yield assert_equal, status.statuses[0][1], 'start'
    yield assert_equal, status.statuses[1][1], 'end'
    rmtree(workflow.base_dir)
def test_itersource_two_join_nodes():
    """Test join with a midstream ``itersource`` and an upstream iterable."""
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    # Make the workflow.
    wf = pe.Workflow(name='test')
    # the iterated input node
    inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec')
    inputspec.iterables = [('n', [1, 2])]
    # an intermediate node in the first iteration path
    pre_join1 = pe.Node(IncrementInterface(), name='pre_join1')
    wf.connect(inputspec, 'n', pre_join1, 'input1')
    # an iterable pre-join node with an itersource: the iterables values
    # are selected per upstream 'n' value via the dict keys
    pre_join2 = pe.Node(ProductInterface(), name='pre_join2')
    pre_join2.itersource = ('inputspec', 'n')
    pre_join2.iterables = ('input1', {1: [3, 4], 2: [5, 6]})
    wf.connect(pre_join1, 'output1', pre_join2, 'input2')
    # an intermediate node in the second iteration path
    pre_join3 = pe.Node(IncrementInterface(), name='pre_join3')
    wf.connect(pre_join2, 'output1', pre_join3, 'input1')
    # the first join node, joining over the itersource-bearing node
    join1 = pe.JoinNode(IdentityInterface(fields=['vector']),
                        joinsource='pre_join2', joinfield='vector',
                        name='join1')
    wf.connect(pre_join3, 'output1', join1, 'vector')
    # a join successor node
    post_join1 = pe.Node(SumInterface(), name='post_join1')
    wf.connect(join1, 'vector', post_join1, 'input1')
    # a summary join node, joining over the original input iterable
    join2 = pe.JoinNode(IdentityInterface(fields=['vector']),
                        joinsource='inputspec', joinfield='vector',
                        name='join2')
    wf.connect(post_join1, 'output1', join2, 'vector')
    result = wf.run()
    # the expanded graph contains the 14 test_itersource_join_source_node
    # nodes plus the summary join node.
    assert_equal(len(result.nodes()), 15,
                 "The number of expanded nodes is incorrect.")
    os.chdir(cwd)
    rmtree(wd)
def test_synchronize_join_node():
    """Test join on an input node which has the ``synchronize`` flag set to True."""
    global _products
    # reset the module-level accumulator written by ProductInterface runs
    _products = []
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    # Make the workflow.
    wf = pe.Workflow(name='test')
    # the iterated input node
    inputspec = pe.Node(IdentityInterface(fields=['m', 'n']), name='inputspec')
    inputspec.iterables = [('m', [1, 2]), ('n', [3, 4])]
    # synchronize pairs the iterables values instead of taking
    # their cartesian product
    inputspec.synchronize = True
    # two pre-join nodes in a parallel iterated path
    inc1 = pe.Node(IncrementInterface(), name='inc1')
    wf.connect(inputspec, 'm', inc1, 'input1')
    inc2 = pe.Node(IncrementInterface(), name='inc2')
    wf.connect(inputspec, 'n', inc2, 'input1')
    # the join node
    join = pe.JoinNode(IdentityInterface(fields=['vector1', 'vector2']),
                       joinsource='inputspec', name='join')
    wf.connect(inc1, 'output1', join, 'vector1')
    wf.connect(inc2, 'output1', join, 'vector2')
    # a post-join node
    prod = pe.MapNode(ProductInterface(), name='prod',
                      iterfield=['input1', 'input2'])
    wf.connect(join, 'vector1', prod, 'input1')
    wf.connect(join, 'vector2', prod, 'input2')
    result = wf.run()
    # there are 3 iterables expansions.
    # thus, the expanded graph contains 2 * 2 iteration pre-join nodes, 1 join
    # node and 1 post-join node.
    assert_equal(len(result.nodes()), 6,
                 "The number of expanded nodes is incorrect.")
    # the product inputs are [2, 3] and [4, 5]
    assert_equal(_products, [8, 15],
                 "The post-join products is incorrect: %s." % _products)
    os.chdir(cwd)
    rmtree(wd)
def test_callback_exception():
    """A failing Linear run reports 'start' then 'exception' via the callback."""
    so = Status()
    wf = pe.Workflow(name='test', base_dir=mkdtemp())
    f_node = pe.Node(niu.Function(function=bad_func, input_names=[],
                                  output_names=[]),
                     name='f_node')
    wf.add_nodes([f_node])
    wf.config['execution'] = {'crashdump_dir': wf.base_dir}
    try:
        wf.run(plugin="Linear", plugin_args={'status_callback': so.callback})
    except Exception:
        # the run is expected to fail; only the callback record is checked.
        # Catch Exception rather than a bare except so KeyboardInterrupt
        # and SystemExit still propagate.
        pass
    assert_equal(len(so.statuses), 2)
    for (n, s) in so.statuses:
        yield assert_equal, n.name, 'f_node'
    yield assert_equal, so.statuses[0][1], 'start'
    yield assert_equal, so.statuses[1][1], 'exception'
    rmtree(wf.base_dir)
def test8():
    """Two iterable sources fanning into one sink: 8 nodes, 8 edges, none isolated."""
    pipe = pe.Workflow(name='pipe')
    mod1 = pe.Node(interface=TestInterface(), name='mod1')
    mod2 = pe.Node(interface=TestInterface(), name='mod2')
    mod3 = pe.Node(interface=TestInterface(), name='mod3')
    mod1.iterables = dict(input1=lambda: [1, 2])
    mod2.iterables = dict(input1=lambda: [1, 2])
    mod3.iterables = {}
    pipe.connect([(mod1, mod3, [('output1', 'input2')]),
                  (mod2, mod3, [('output1', 'input2')])])
    pipe._create_flat_graph()
    pipe._execgraph = pe._generate_expanded_graph(deepcopy(pipe._flatgraph))
    # yield (callable, args) tuples so nose invokes each assertion;
    # ``yield assert_equal(...)`` would yield None, which nose cannot call
    yield assert_equal, len(pipe._execgraph.nodes()), 8
    yield assert_equal, len(pipe._execgraph.edges()), 8
    # total degree of every expanded node, in ascending order
    edgenum = sorted([(len(pipe._execgraph.in_edges(node)) +
                       len(pipe._execgraph.out_edges(node)))
                      for node in pipe._execgraph.nodes()])
    # the smallest degree is positive, i.e. no node is disconnected
    yield assert_true, edgenum[0] > 0
def test_connect():
    """connect records both nodes and the field mapping in the workflow graph."""
    pipe = pe.Workflow(name='pipe')
    mod1 = pe.Node(interface=TestInterface(), name='mod1')
    mod2 = pe.Node(interface=TestInterface(), name='mod2')
    pipe.connect([(mod1, mod2, [('output1', 'input1')])])
    # yield (callable, args) tuples so nose invokes each assertion;
    # ``yield assert_true(...)`` would yield None, which nose cannot call
    yield assert_true, mod1 in pipe._graph.nodes()
    yield assert_true, mod2 in pipe._graph.nodes()
    yield (assert_equal, pipe._graph.get_edge_data(mod1, mod2),
           {'connect': [('output1', 'input1')]})
def test_mlab_inputspec():
    """MatlabInputSpec exposes the expected traits with the expected defaults."""
    spec = mlab.MatlabInputSpec()
    # yield (callable, args) tuples so nose invokes each assertion;
    # ``yield assert_true(...)`` would yield None, which nose cannot call
    for k in ['paths', 'script', 'nosplash', 'mfile', 'logfile',
              'script_file', 'nodesktop']:
        yield assert_true, k in spec.copyable_trait_names()
    # default flag and file-name values
    yield assert_true, spec.nodesktop
    yield assert_true, spec.nosplash
    yield assert_false, spec.mfile
    yield assert_equal, spec.script_file, 'pyscript.m'
def test_multifield_join_node():
    """Test join on several fields."""
    global _products
    # reset the module-level accumulator written by ProductInterface runs
    _products = []
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    # Make the workflow.
    wf = pe.Workflow(name='test')
    # the iterated input node
    inputspec = pe.Node(IdentityInterface(fields=['m', 'n']), name='inputspec')
    inputspec.iterables = [('m', [1, 2]), ('n', [3, 4])]
    # two pre-join nodes in a parallel iterated path
    inc1 = pe.Node(IncrementInterface(), name='inc1')
    wf.connect(inputspec, 'm', inc1, 'input1')
    inc2 = pe.Node(IncrementInterface(), name='inc2')
    wf.connect(inputspec, 'n', inc2, 'input1')
    # the join node; without an explicit joinfield, every field is joined
    join = pe.JoinNode(IdentityInterface(fields=['vector1', 'vector2']),
                       joinsource='inputspec', name='join')
    wf.connect(inc1, 'output1', join, 'vector1')
    wf.connect(inc2, 'output1', join, 'vector2')
    # a post-join node
    prod = pe.MapNode(ProductInterface(), name='prod',
                      iterfield=['input1', 'input2'])
    wf.connect(join, 'vector1', prod, 'input1')
    wf.connect(join, 'vector2', prod, 'input2')
    result = wf.run()
    # the iterables are expanded as the cartesian product of the iterables values.
    # thus, the expanded graph contains 2 * (2 * 2) iteration pre-join nodes, 1 join
    # node and 1 post-join node.
    assert_equal(len(result.nodes()), 10,
                 "The number of expanded nodes is incorrect.")
    # the product inputs are [2, 4], [2, 5], [3, 4], [3, 5]
    assert_equal(set(_products), set([8, 10, 12, 15]),
                 "The post-join products is incorrect: %s." % _products)
    os.chdir(cwd)
    rmtree(wd)
def test_callback_multiproc_exception():
    """A failing MultiProc run reports 'start' then 'exception' via the callback."""
    so = Status()
    wf = pe.Workflow(name='test', base_dir=mkdtemp())
    f_node = pe.Node(niu.Function(function=bad_func, input_names=[],
                                  output_names=[]),
                     name='f_node')
    wf.add_nodes([f_node])
    wf.config['execution']['crashdump_dir'] = wf.base_dir
    try:
        wf.run(plugin='MultiProc',
               plugin_args={'status_callback': so.callback})
    except Exception:
        # the run is expected to fail; only the callback record is checked.
        # Catch Exception rather than a bare except so KeyboardInterrupt
        # and SystemExit still propagate.
        pass
    assert_equal(len(so.statuses), 2)
    for (n, s) in so.statuses:
        yield assert_equal, n.name, 'f_node'
    yield assert_equal, so.statuses[0][1], 'start'
    yield assert_equal, so.statuses[1][1], 'exception'
    rmtree(wf.base_dir)
def test_node_joinsource():
    """Test setting the joinsource to a Node."""
    original_dir = os.getcwd()
    work_dir = mkdtemp()
    os.chdir(work_dir)
    # build the test workflow
    workflow = pe.Workflow(name='test')
    # an input node iterating over two values
    inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec')
    inputspec.iterables = [('n', [1, 2])]
    # pass the node object itself, rather than its name, as joinsource
    join = pe.JoinNode(SetInterface(), joinsource=inputspec,
                       joinfield='input1', name='join')
    # the node argument is normalized to the node's name
    assert_equal(join.joinsource, inputspec.name,
                 "The joinsource is not set to the node name.")
    os.chdir(original_dir)
    rmtree(work_dir)
def test_gen_fname():
    """Test the _gen_fname method of FSLCommand."""
    cmd = fsl.FSLCommand(command='junk', output_type='NIFTI_GZ')
    pth = os.getcwd()
    # yield (callable, args) tuples so nose invokes each assertion;
    # ``yield assert_equal(...)`` would yield None, which nose cannot call
    # just the filename
    fname = cmd._gen_fname('foo.nii.gz', suffix='_fsl')
    desired = os.path.join(pth, 'foo_fsl.nii.gz')
    yield assert_equal, fname, desired
    # filename with suffix
    fname = cmd._gen_fname('foo.nii.gz', suffix='_brain')
    desired = os.path.join(pth, 'foo_brain.nii.gz')
    yield assert_equal, fname, desired
    # filename with suffix and working directory
    fname = cmd._gen_fname('foo.nii.gz', suffix='_brain', cwd='/data')
    desired = os.path.join('/data', 'foo_brain.nii.gz')
    yield assert_equal, fname, desired
    # filename with suffix and no file extension change
    fname = cmd._gen_fname('foo.nii.gz', suffix='_brain.mat', change_ext=False)
    desired = os.path.join(pth, 'foo_brain.mat')
    yield assert_equal, fname, desired
def test_identity_join_node():
    """Test an IdentityInterface join."""
    global _sum_operands
    # reset the module-level accumulator written by SumInterface runs
    _sum_operands = []
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    # Make the workflow.
    wf = pe.Workflow(name='test')
    # the iterated input node
    inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec')
    inputspec.iterables = [('n', [1, 2, 3])]
    # a pre-join node in the iterated path
    pre_join1 = pe.Node(IncrementInterface(), name='pre_join1')
    wf.connect(inputspec, 'n', pre_join1, 'input1')
    # the IdentityInterface join node
    join = pe.JoinNode(IdentityInterface(fields=['vector']),
                       joinsource='inputspec', joinfield='vector',
                       name='join')
    wf.connect(pre_join1, 'output1', join, 'vector')
    # an uniterated post-join node
    post_join1 = pe.Node(SumInterface(), name='post_join1')
    wf.connect(join, 'vector', post_join1, 'input1')
    result = wf.run()
    # the expanded graph contains 1 * 3 iteration pre-join nodes, 1 join
    # node and 1 post-join node. Nipype factors away the iterable input
    # IdentityInterface but keeps the join IdentityInterface.
    assert_equal(len(result.nodes()), 5,
                 "The number of expanded nodes is incorrect.")
    # the join collected the three incremented values in iteration order
    assert_equal(_sum_operands[0], [2, 3, 4],
                 "The join Sum input is incorrect: %s." % _sum_operands[0])
    os.chdir(cwd)
    rmtree(wd)
def test_identity_join_node():
    """Test an IdentityInterface join."""
    global _sum_operands
    _sum_operands = []
    original_dir = os.getcwd()
    work_dir = mkdtemp()
    os.chdir(work_dir)
    # build the test workflow
    workflow = pe.Workflow(name='test')
    # input node iterating over three values
    inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec')
    inputspec.iterables = [('n', [1, 2, 3])]
    # increment each iterated value before the join
    incrementer = pe.Node(IncrementInterface(), name='pre_join1')
    workflow.connect(inputspec, 'n', incrementer, 'input1')
    # collect the increments into a vector via an IdentityInterface join
    joiner = pe.JoinNode(IdentityInterface(fields=['vector']),
                         joinsource='inputspec', joinfield='vector',
                         name='join')
    workflow.connect(incrementer, 'output1', joiner, 'vector')
    # sum the joined vector in a single, uniterated successor
    summer = pe.Node(SumInterface(), name='post_join1')
    workflow.connect(joiner, 'vector', summer, 'input1')
    result = workflow.run()
    # 3 pre-join replicates + the join node + the post-join node; the
    # iterable input IdentityInterface is factored away while the join
    # IdentityInterface is kept.
    assert_equal(len(result.nodes()), 5,
                 "The number of expanded nodes is incorrect.")
    assert_equal(_sum_operands[0], [2, 3, 4],
                 "The join Sum input is incorrect: %s." % _sum_operands[0])
    os.chdir(original_dir)
    rmtree(work_dir)
def test_coreg():
    """CalcCoregAffine derives default mat/invmat names from moving and target."""
    moving_img = example_data(infile='functional.nii')
    target_img = example_data(infile='T1.nii')
    mat_file = example_data(infile='trans.mat')
    coreg = spmu.CalcCoregAffine(matlab_cmd='mymatlab')
    coreg.inputs.target = target_img
    assert_equal(coreg.inputs.matlab_cmd, 'mymatlab')
    coreg.inputs.moving = moving_img
    # mat is undefined until the matlab command is generated
    assert_equal(isdefined(coreg.inputs.mat), False)
    path, moving_name, _ = split_filename(moving_img)
    _, target_name, _ = split_filename(target_img)
    mat_file = os.path.join(path, '%s_to_%s.mat' % (moving_name, target_name))
    inverse_mat_file = fname_presuffix(mat_file, prefix='inverse_')
    script = coreg._make_matlab_command(None)
    # generating the command fills in the default output file names
    assert_equal(coreg.inputs.mat, mat_file)
    assert_equal(coreg.inputs.invmat, inverse_mat_file)
def test_coreg():
    """CalcCoregAffine fills in default mat/invmat names when the command is built."""
    moving = example_data(infile="functional.nii")
    target = example_data(infile="T1.nii")
    mat = example_data(infile="trans.mat")
    coreg = spmu.CalcCoregAffine(matlab_cmd="mymatlab")
    coreg.inputs.target = target
    assert_equal(coreg.inputs.matlab_cmd, "mymatlab")
    coreg.inputs.moving = moving
    # mat is undefined until the matlab command is generated
    assert_equal(isdefined(coreg.inputs.mat), False)
    # expected default names: <moving>_to_<target>.mat and its inverse
    pth, mov, _ = split_filename(moving)
    _, tgt, _ = split_filename(target)
    mat = os.path.join(pth, "%s_to_%s.mat" % (mov, tgt))
    invmat = fname_presuffix(mat, prefix="inverse_")
    scrpt = coreg._make_matlab_command(None)
    assert_equal(coreg.inputs.mat, mat)
    assert_equal(coreg.inputs.invmat, invmat)
def test_apply_transform():
    """ApplyTransform's script queries and rewrites the image space."""
    moving_img = example_data(infile='functional.nii')
    transform_mat = example_data(infile='trans.mat')
    applymat = spmu.ApplyTransform(matlab_cmd='mymatlab')
    assert_equal(applymat.inputs.matlab_cmd, 'mymatlab')
    applymat.inputs.in_file = moving_img
    applymat.inputs.mat = transform_mat
    script = applymat._make_matlab_command(None)
    # the script reads the current affine and writes back the
    # transform applied to it
    for fragment in ['img_space = spm_get_space(infile);',
                     'spm_get_space(infile, transform.M * img_space);']:
        assert_equal(fragment in script, True)
def test_apply_transform():
    """ApplyTransform script variant that updates the volume header via V.mat."""
    moving = example_data(infile='functional.nii')
    mat = example_data(infile='trans.mat')
    applymat = spmu.ApplyTransform(matlab_cmd='mymatlab')
    assert_equal(applymat.inputs.matlab_cmd, 'mymatlab')
    applymat.inputs.in_file = moving
    applymat.inputs.mat = mat
    scrpt = applymat._make_matlab_command(None)
    # the script splits the volume file name into its parts ...
    expected = '[p n e v] = spm_fileparts(V.fname);'
    assert_equal(expected in scrpt, True)
    # ... and left-multiplies the volume affine by the transform matrix
    expected = 'V.mat = transform.M * V.mat;'
    assert_equal(expected in scrpt, True)
def test_apply_transform():
    """ApplyTransform script variant using a bare ``M`` transform variable."""
    moving = example_data(infile="functional.nii")
    mat = example_data(infile="trans.mat")
    applymat = spmu.ApplyTransform(matlab_cmd="mymatlab")
    assert_equal(applymat.inputs.matlab_cmd, "mymatlab")
    applymat.inputs.in_file = moving
    applymat.inputs.mat = mat
    scrpt = applymat._make_matlab_command(None)
    # the script reads the current affine of the input image ...
    expected = "img_space = spm_get_space(infile);"
    assert_equal(expected in scrpt, True)
    # ... and writes back the transformed affine (``M``, not ``transform.M``)
    expected = "spm_get_space(infile, M * img_space);"
    assert_equal(expected in scrpt, True)
def test_apply_transform():
    """ApplyTransform's script queries and rewrites the image space."""
    moving = example_data(infile='functional.nii')
    mat = example_data(infile='trans.mat')
    applymat = spmu.ApplyTransform(matlab_cmd='mymatlab')
    assert_equal(applymat.inputs.matlab_cmd, 'mymatlab')
    applymat.inputs.in_file = moving
    applymat.inputs.mat = mat
    scrpt = applymat._make_matlab_command(None)
    # the script reads the current affine of the input image ...
    expected = 'img_space = spm_get_space(infile);'
    assert_equal(expected in scrpt, True)
    # ... and writes back the transform applied to that affine
    expected = 'spm_get_space(infile, transform.M * img_space);'
    assert_equal(expected in scrpt, True)
def test_join_expansion():
    """Test full join expansion: graph shape, join output, and iterated successors."""
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    # Make the workflow.
    wf = pe.Workflow(name='test')
    # the iterated input node
    inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec')
    inputspec.iterables = [('n', [1, 2])]
    # a pre-join node in the iterated path
    pre_join1 = pe.Node(IncrementInterface(), name='pre_join1')
    wf.connect(inputspec, 'n', pre_join1, 'input1')
    # another pre-join node in the iterated path
    pre_join2 = pe.Node(IncrementInterface(), name='pre_join2')
    wf.connect(pre_join1, 'output1', pre_join2, 'input1')
    # the join node
    join = pe.JoinNode(SumInterface(), joinsource='inputspec',
                       joinfield='input1', name='join')
    wf.connect(pre_join2, 'output1', join, 'input1')
    # an uniterated post-join node
    post_join1 = pe.Node(IncrementInterface(), name='post_join1')
    wf.connect(join, 'output1', post_join1, 'input1')
    # a post-join node in the iterated path
    post_join2 = pe.Node(ProductInterface(), name='post_join2')
    wf.connect(join, 'output1', post_join2, 'input1')
    wf.connect(pre_join1, 'output1', post_join2, 'input2')
    result = wf.run()
    # the two expanded pre-join predecessor nodes feed into one join node
    joins = [node for node in result.nodes() if node.name == 'join']
    assert_equal(len(joins), 1,
                 "The number of join result nodes is incorrect.")
    # the expanded graph contains 2 * 2 = 4 iteration pre-join nodes, 1 join
    # node, 1 non-iterated post-join node and 2 * 1 iteration post-join nodes.
    # Nipype factors away the IdentityInterface.
    assert_equal(len(result.nodes()), 8,
                 "The number of expanded nodes is incorrect.")
    # the join Sum result is (1 + 1 + 1) + (2 + 1 + 1)
    assert_equal(len(_sums), 1,
                 "The number of join outputs is incorrect")
    assert_equal(_sums[0], 7,
                 "The join Sum output value is incorrect: %s." % _sums[0])
    # the join input preserves the iterables input order
    assert_equal(_sum_operands[0], [3, 4],
                 "The join Sum input is incorrect: %s."
                 % _sum_operands[0])
    # there are two iterations of the post-join node in the iterable path
    assert_equal(len(_products), 2,
                 "The number of iterated post-join outputs is incorrect")
    os.chdir(cwd)
    rmtree(wd)
def test_reslice():
    """Reslice validates interp, derives out_file, and emits the expected script."""
    moving_img = example_data(infile='functional.nii')
    space_defining_img = example_data(infile='T1.nii')
    reslice = spmu.Reslice(matlab_cmd='mymatlab_version')
    assert_equal(reslice.inputs.matlab_cmd, 'mymatlab_version')
    reslice.inputs.in_file = moving_img
    reslice.inputs.space_defining = space_defining_img
    # interp defaults to 0
    assert_equal(reslice.inputs.interp, 0)
    # only small integer interp values are accepted by trait validation
    assert_raises(TraitError, reslice.inputs.trait_set, interp='nearest')
    assert_raises(TraitError, reslice.inputs.trait_set, interp=10)
    reslice.inputs.interp = 1
    script = reslice._make_matlab_command(None)
    # the default output name prefixes the input with 'r'
    resliced_name = fname_presuffix(moving_img, prefix='r')
    assert_equal(reslice.inputs.out_file, resliced_name)
    # flags structure written into the script (whitespace-insensitive check)
    flags_fragment = '\nflags.mean=0;\nflags.which=1;\nflags.mask=0;'
    assert_equal(flags_fragment in script.replace(' ', ''), True)
    assert_equal('flags.interp = 1;\n' in script, True)
    assert_equal('spm_reslice(invols, flags);' in script, True)
def test_dicom_import():
    """DicomImport exposes sane defaults and rejects invalid trait values."""
    dicom_file = example_data(infile='dicomdir/123456-1-1.dcm')
    importer = spmu.DicomImport(matlab_cmd='mymatlab')
    # default input values
    assert_equal(importer.inputs.matlab_cmd, 'mymatlab')
    assert_equal(importer.inputs.output_dir_struct, 'flat')
    assert_equal(importer.inputs.output_dir, './converted_dicom')
    assert_equal(importer.inputs.format, 'nii')
    assert_equal(importer.inputs.icedims, False)
    # invalid values are rejected by trait validation
    assert_raises(TraitError, importer.inputs.trait_set,
                  output_dir_struct='wrong')
    assert_raises(TraitError, importer.inputs.trait_set, format='FAT')
    assert_raises(TraitError, importer.inputs.trait_set,
                  in_files=['does_sfd_not_32fn_exist.dcm'])
    # an existing file is accepted and stored verbatim
    importer.inputs.in_files = [dicom_file]
    assert_equal(importer.inputs.in_files, [dicom_file])