def make_nipype_pipeline(self, base_dir):
    from nipype import config
    config.enable_provenance()
    import nipype.pipeline.engine as pe
    import re

    self.compute_dependencies()
    nodes = []
    pipeline = pe.Workflow('converted_to_nipype', base_dir)
    for command in self.commands:
        (interface, input_map, output_map) = make_nipype_function_interface(command)
        # Sanitize the comment so it is a valid Nipype node name
        comment = re.sub(r'\W', '_', command.comment)
        nodes.append((pe.Node(interface=interface, name=comment),
                      input_map, output_map))
    (input_nodes, mapping) = self.make_selectfiles(nodes)
    for i in range(len(self.commands)):
        (parent_node, parent_inputs, parent_outputs) = nodes[i]
        # Wire inputs that come from SelectFiles nodes
        for (input_file, input_name) in parent_inputs.items():
            if input_file in mapping:
                (fields, j) = mapping[input_file]
                input_source = input_nodes[j]
                nipype_input_name = make_nipype_name(fields)
                pipeline.connect([(input_source, parent_node,
                                   [(nipype_input_name, input_name)])])
        # Wire this command's outputs to every downstream command that needs them
        for j in range(i, len(self.commands)):
            (child_node, child_inputs, _) = nodes[j]
            files = self.dependency_files[i, j]
            for file in files:
                assert file in parent_outputs
                assert file in child_inputs
                pipeline.connect([(parent_node, child_node,
                                   [(parent_outputs[file], child_inputs[file])])])
    return pipeline

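# A minimal sketch of how the workflow returned above might be driven,
# assuming `pipeline_obj` is an instance of the (unshown) class defining
# make_nipype_pipeline. write_graph and run are standard nipype Workflow API.
wf = pipeline_obj.make_nipype_pipeline('/tmp/nipype_work')  # hypothetical instance
wf.write_graph(graph2use='flat')  # render the converted dependency graph
wf.run()  # execute; provenance is recorded because enable_provenance() was set
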
def nipype_convert(item_dicoms, prefix, with_prov, bids, tmpdir):
    """Convert DICOMs to NIfTI via Nipype's Dcm2niix interface."""
    import nipype
    if with_prov:
        from nipype import config
        config.enable_provenance()
    from nipype import Node
    from nipype.interfaces.dcm2nii import Dcm2niix

    item_dicoms = list(map(op.abspath, item_dicoms))  # absolute paths

    convertnode = Node(Dcm2niix(), name='convert')
    convertnode.base_dir = tmpdir
    convertnode.inputs.source_names = item_dicoms
    convertnode.inputs.out_filename = op.basename(op.dirname(prefix))
    if nipype.__version__.split('.')[0] == '0':
        # deprecated since 1.0, might be needed(?) before
        convertnode.inputs.terminal_output = 'allatonce'
    else:
        convertnode.terminal_output = 'allatonce'
    convertnode.inputs.bids_format = bids
    eg = convertnode.run()

    # prov information
    prov_file = prefix + '_prov.ttl' if with_prov else None
    if prov_file:
        safe_copyfile(op.join(convertnode.base_dir, convertnode.name,
                              'provenance.ttl'),
                      prov_file)
    return eg, prov_file

def nipype_convert(item_dicoms, prefix, with_prov, bids, tmpdir):
    """Convert DICOMs to NIfTI via Nipype's Dcm2niix interface."""
    import nipype
    if with_prov:
        from nipype import config
        config.enable_provenance()
    from nipype import Node
    from nipype.interfaces.dcm2nii import Dcm2niix

    item_dicoms = list(map(op.abspath, item_dicoms))  # absolute paths
    dicom_dir = op.dirname(item_dicoms[0]) if item_dicoms else None

    convertnode = Node(Dcm2niix(), name='convert')
    convertnode.base_dir = tmpdir
    convertnode.inputs.source_dir = dicom_dir
    convertnode.inputs.out_filename = op.basename(op.dirname(prefix))
    if nipype.__version__.split('.')[0] == '0':
        # deprecated since 1.0, might be needed(?) before
        convertnode.inputs.terminal_output = 'allatonce'
    else:
        convertnode.terminal_output = 'allatonce'
    convertnode.inputs.bids_format = bids
    eg = convertnode.run()

    # prov information
    prov_file = prefix + '_prov.ttl' if with_prov else None
    if prov_file:
        safe_copyfile(
            op.join(convertnode.base_dir, convertnode.name, 'provenance.ttl'),
            prov_file)
    return eg, prov_file

def nipype_convert(item_dicoms, prefix, with_prov, bids_options, tmpdir,
                   dcmconfig=None):
    """
    Convert DICOMs grouped from heuristic using Nipype's Dcm2niix interface.

    Parameters
    ----------
    item_dicoms : list
        DICOM files to convert
    prefix : str
        Heuristic output path
    with_prov : bool
        Store provenance information
    bids_options : list or None
        If not None, output BIDS sidecar JSONs; the list may contain
        BIDS-specific options
    tmpdir : str
        Conversion working directory
    dcmconfig : str, optional
        JSON file used for additional Dcm2niix configuration
    """
    import nipype
    if with_prov:
        from nipype import config
        config.enable_provenance()
    from nipype import Node
    from nipype.interfaces.dcm2nii import Dcm2niix

    item_dicoms = list(map(op.abspath, item_dicoms))  # absolute paths
    fromfile = dcmconfig if dcmconfig else None
    if fromfile:
        lgr.info("Using custom config file %s", fromfile)

    convertnode = Node(Dcm2niix(from_file=fromfile), name='convert')
    convertnode.base_dir = tmpdir
    convertnode.inputs.source_names = item_dicoms
    convertnode.inputs.out_filename = prefix
    if nipype.__version__.split('.')[0] == '0':
        # deprecated since 1.0, might be needed(?) before
        convertnode.inputs.terminal_output = 'allatonce'
    else:
        convertnode.terminal_output = 'allatonce'
    convertnode.inputs.bids_format = bids_options is not None
    eg = convertnode.run()

    # prov information
    prov_file = prefix + '_prov.ttl' if with_prov else None
    if prov_file:
        safe_copyfile(
            op.join(convertnode.base_dir, convertnode.name, 'provenance.ttl'),
            prov_file)
    return eg, prov_file

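# A minimal usage sketch for the variant above. The DICOM paths, output
# prefix, and temporary directory are hypothetical; the module-level names
# (op, safe_copyfile, lgr) are assumed in scope, as in the source file.
import tempfile

dicoms = ['/data/sub-01/dicom/IM-0001.dcm',
          '/data/sub-01/dicom/IM-0002.dcm']  # hypothetical input files
with tempfile.TemporaryDirectory() as tmp:
    eg, prov = nipype_convert(dicoms,
                              prefix='/out/sub-01/func/sub-01_task-rest',
                              with_prov=True,
                              bids_options=[],  # an empty list still enables BIDS sidecars
                              tmpdir=tmp)
    print('provenance file:', prov)  # None unless with_prov=True
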
def test_provenance_exists(tmpdir):
    tmpdir.chdir()
    from nipype import config
    from nipype.interfaces.base import CommandLine

    provenance_state = config.get('execution', 'write_provenance')
    hash_state = config.get('execution', 'hash_method')
    config.enable_provenance()
    CommandLine('echo hello').run()
    config.set('execution', 'write_provenance', provenance_state)
    config.set('execution', 'hash_method', hash_state)
    assert tmpdir.join('provenance.provn').check()

def test_provenance_exists(tmpdir):
    tmpdir.chdir()
    from nipype import config
    from nipype.interfaces.base import CommandLine

    provenance_state = config.get("execution", "write_provenance")
    hash_state = config.get("execution", "hash_method")
    config.enable_provenance()
    CommandLine("echo hello").run()
    config.set("execution", "write_provenance", provenance_state)
    config.set("execution", "hash_method", hash_state)
    assert tmpdir.join("provenance.provn").check()

def test_provenance_exists(tmpdir):
    tempdir = str(tmpdir)
    os.chdir(tempdir)
    from nipype import config
    from nipype.interfaces.base import CommandLine

    provenance_state = config.get('execution', 'write_provenance')
    hash_state = config.get('execution', 'hash_method')
    config.enable_provenance()
    CommandLine('echo hello').run()
    config.set('execution', 'write_provenance', provenance_state)
    config.set('execution', 'hash_method', hash_state)
    provenance_exists = os.path.exists(os.path.join(tempdir, 'provenance.provn'))
    assert provenance_exists

def test_provenance_exists(tmpdir):
    tempdir = str(tmpdir)
    os.chdir(tempdir)
    from nipype import config
    from nipype.interfaces.base import CommandLine

    provenance_state = config.get('execution', 'write_provenance')
    hash_state = config.get('execution', 'hash_method')
    config.enable_provenance()
    CommandLine('echo hello').run()
    config.set('execution', 'write_provenance', provenance_state)
    config.set('execution', 'hash_method', hash_state)
    provenance_exists = os.path.exists(
        os.path.join(tempdir, 'provenance.provn'))
    assert provenance_exists

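# The test variants above restore the provenance settings only when the run
# succeeds. A minimal alternative sketch, assuming the same nipype config
# API, wraps the restore in try/finally so a failing CommandLine.run() cannot
# leak the modified settings into later tests.
def test_provenance_exists_safe(tmpdir):
    tmpdir.chdir()
    from nipype import config
    from nipype.interfaces.base import CommandLine

    provenance_state = config.get('execution', 'write_provenance')
    hash_state = config.get('execution', 'hash_method')
    config.enable_provenance()
    try:
        CommandLine('echo hello').run()
    finally:
        # Always restore the caller's configuration
        config.set('execution', 'write_provenance', provenance_state)
        config.set('execution', 'hash_method', hash_state)
    assert tmpdir.join('provenance.provn').check()
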
def run_examples(example, pipelines, data_path, plugin=None, rm_base_dir=True):
    from nipype import config
    from nipype.interfaces.base import CommandLine

    if plugin is None:
        plugin = 'MultiProc'

    print('running example: %s with plugin: %s' % (example, plugin))
    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == 'MultiProc':
        plugin_args['n_procs'] = int(
            os.getenv('NIPYPE_NUMBER_OF_CPUS', cpu_count()))

    module = import_module('.' + example, 'niflow.nipype1.examples')
    for pipeline in pipelines:
        wf = getattr(module, pipeline)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)

        results_dir = os.path.join(wf.base_dir, wf.name)
        if rm_base_dir and os.path.exists(results_dir):
            rmtree(results_dir)

        # Handle a logging directory
        log_dir = os.path.join(os.getcwd(), 'logs', example)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        wf.config = {
            'execution': {
                'hash_method': 'timestamp',
                'stop_on_first_rerun': 'true',
                'write_provenance': 'true',
                'poll_sleep_duration': 2
            },
            'logging': {
                'log_directory': log_dir,
                'log_to_file': True
            }
        }

        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)

def run_examples(example, pipelines, data_path, plugin=None, rm_base_dir=True):
    from nipype import config
    from nipype.interfaces.base import CommandLine

    if plugin is None:
        plugin = 'MultiProc'

    print('running example: %s with plugin: %s' % (example, plugin))
    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == 'MultiProc':
        plugin_args['n_procs'] = int(
            os.getenv('NIPYPE_NUMBER_OF_CPUS', cpu_count()))

    __import__(example)
    for pipeline in pipelines:
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)

        results_dir = os.path.join(wf.base_dir, wf.name)
        if rm_base_dir and os.path.exists(results_dir):
            rmtree(results_dir)

        # Handle a logging directory
        log_dir = os.path.join(os.getcwd(), 'logs', example)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        wf.config = {
            'execution': {
                'hash_method': 'timestamp',
                'stop_on_first_rerun': 'true',
                'write_provenance': 'true',
                'poll_sleep_duration': 2
            },
            'logging': {
                'log_directory': log_dir,
                'log_to_file': True
            }
        }

        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)

def nipype_convert(item_dicoms, prefix, with_prov, bids, tmpdir):
    """Convert DICOMs to NIfTI via Nipype's Dcm2niix interface."""
    if with_prov:
        from nipype import config
        config.enable_provenance()
    from nipype import Node
    from nipype.interfaces.dcm2nii import Dcm2niix

    item_dicoms = list(map(op.abspath, item_dicoms))  # absolute paths

    convertnode = Node(Dcm2niix(), name='convert')
    convertnode.base_dir = tmpdir
    convertnode.inputs.source_names = item_dicoms
    convertnode.inputs.out_filename = op.basename(op.dirname(prefix))
    convertnode.inputs.bids_format = bids
    convertnode.inputs.anon_bids = False
    return convertnode.run()

def run_examples(example, pipelines, data_path, plugin=None, rm_base_dir=True):
    from nipype import config
    from nipype.interfaces.base import CommandLine

    if plugin is None:
        plugin = "MultiProc"

    print("running example: %s with plugin: %s" % (example, plugin))
    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == "MultiProc":
        plugin_args["n_procs"] = int(os.getenv("NIPYPE_NUMBER_OF_CPUS", cpu_count()))

    __import__(example)
    for pipeline in pipelines:
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), "output", example, plugin)

        results_dir = os.path.join(wf.base_dir, wf.name)
        if rm_base_dir and os.path.exists(results_dir):
            rmtree(results_dir)

        # Handle a logging directory
        log_dir = os.path.join(os.getcwd(), "logs", example)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        wf.config = {
            "execution": {
                "hash_method": "timestamp",
                "stop_on_first_rerun": "true",
                "write_provenance": "true",
                "poll_sleep_duration": 2,
            },
            "logging": {"log_directory": log_dir, "log_to_file": True},
        }

        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)

def run_examples(example, pipelines, plugin):
    print('running example: %s with plugin: %s' % (example, plugin))
    from nipype import config
    config.enable_debug_mode()
    config.enable_provenance()
    from nipype.interfaces.base import CommandLine
    CommandLine.set_default_terminal_output("stream")

    __import__(example)
    for pipeline in pipelines:
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)
        if os.path.exists(wf.base_dir):
            rmtree(wf.base_dir)
        wf.config = {'execution': {'hash_method': 'timestamp',
                                   'stop_on_first_rerun': 'true',
                                   'write_provenance': 'true'}}
        wf.run(plugin=plugin, plugin_args={'n_procs': 4})
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)

def run_examples(example, pipelines, data_path, plugin=None):
    from nipype import config
    from nipype.interfaces.base import CommandLine

    if plugin is None:
        plugin = 'MultiProc'

    print('running example: %s with plugin: %s' % (example, plugin))
    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == 'MultiProc':
        plugin_args['n_procs'] = cpu_count()

    __import__(example)
    for pipeline in pipelines:
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)
        if os.path.exists(wf.base_dir):
            rmtree(wf.base_dir)

        # Handle a logging directory
        log_dir = os.path.join(os.getcwd(), 'logs', example)
        if os.path.exists(log_dir):
            rmtree(log_dir)
        os.makedirs(log_dir)

        wf.config = {'execution': {'hash_method': 'timestamp',
                                   'stop_on_first_rerun': 'true',
                                   'write_provenance': 'true'},
                     'logging': {'log_directory': log_dir,
                                 'log_to_file': True}}
        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)

def run_examples(example, pipelines, plugin):
    print('running example: %s with plugin: %s' % (example, plugin))
    from nipype import config
    config.enable_debug_mode()
    config.enable_provenance()
    from nipype.interfaces.base import CommandLine
    CommandLine.set_default_terminal_output("stream")

    __import__(example)
    for pipeline in pipelines:
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)
        if os.path.exists(wf.base_dir):
            rmtree(wf.base_dir)
        wf.config = {
            'execution': {
                'hash_method': 'timestamp',
                'stop_on_first_rerun': 'true',
                'write_provenance': 'true'
            }
        }
        wf.run(plugin=plugin, plugin_args={'n_procs': 4})
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)

#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
============================
fMRI: OpenfMRI.org data, FSL
============================

A growing number of datasets are available on `OpenfMRI
<http://openfmri.org>`_. This script demonstrates how to use nipype to
analyze a data set::

    python fmri_openfmri.py --datasetdir ds107
"""

from nipype import config
config.enable_provenance()

from nipype.external import six
from glob import glob
import os

import nipype.pipeline.engine as pe
import nipype.algorithms.modelgen as model
import nipype.algorithms.rapidart as ra
import nipype.interfaces.fsl as fsl
import nipype.interfaces.io as nio
import nipype.interfaces.utility as niu
from nipype.workflows.fmri.fsl import (create_featreg_preproc,
                                       create_modelfit_workflow,
                                       create_fixed_effects_flow,
                                       create_reg_workflow)

def convert(items, anonymizer=None, symlink=True, converter=None):
    prov_files = []
    tmpdir = mkdtemp()
    for item in items:
        if isinstance(item[1], (list, tuple)):
            outtypes = item[1]
        else:
            outtypes = [item[1]]
        prefix = item[0]
        print('Converting %s' % prefix)
        dirname = os.path.dirname(prefix + '.ext')
        print(dirname)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        for outtype in outtypes:
            print(outtype)
            if outtype == 'dicom':
                # Collect the source DICOMs as links in a per-series directory
                dicomdir = prefix + '_dicom'
                if os.path.exists(dicomdir):
                    shutil.rmtree(dicomdir)
                os.mkdir(dicomdir)
                for filename in item[2]:
                    outfile = os.path.join(dicomdir, os.path.split(filename)[1])
                    if not os.path.islink(outfile):
                        if symlink:
                            os.symlink(filename, outfile)
                        else:
                            os.link(filename, outfile)
            elif outtype in ['nii', 'nii.gz']:
                outname = prefix + '.' + outtype
                scaninfo = prefix + '_scaninfo.json'
                if not os.path.exists(outname):
                    from nipype import config
                    config.enable_provenance()
                    from nipype import Function, Node
                    from nipype.interfaces.base import isdefined
                    print(converter)
                    if converter == 'mri_convert':
                        from nipype.interfaces.freesurfer.preprocess import MRIConvert
                        convertnode = Node(MRIConvert(), name='convert')
                        convertnode.base_dir = tmpdir
                        if outtype == 'nii.gz':
                            convertnode.inputs.out_type = 'niigz'
                        convertnode.inputs.in_file = item[2][0]
                        convertnode.inputs.out_file = outname
                        res = convertnode.run()
                    elif converter == 'dcm2nii':
                        from nipype.interfaces.dcm2nii import Dcm2nii
                        convertnode = Node(Dcm2nii(), name='convert')
                        convertnode.base_dir = tmpdir
                        convertnode.inputs.source_names = item[2]
                        convertnode.inputs.gzip_output = outtype == 'nii.gz'
                        convertnode.inputs.terminal_output = 'allatonce'
                        res = convertnode.run()
                        if isinstance(res.outputs.converted_files, list):
                            print("Cannot convert dicom files - series likely "
                                  "has multiple orientations: ", item[2])
                            continue
                        else:
                            shutil.copyfile(res.outputs.converted_files, outname)
                        if isdefined(res.outputs.bvecs):
                            outname_bvecs = prefix + '.bvecs'
                            outname_bvals = prefix + '.bvals'
                            shutil.copyfile(res.outputs.bvecs, outname_bvecs)
                            shutil.copyfile(res.outputs.bvals, outname_bvals)
                    # Preserve the provenance record emitted by the conversion node
                    prov_file = prefix + '_prov.ttl'
                    shutil.copyfile(os.path.join(convertnode.base_dir,
                                                 convertnode.name,
                                                 'provenance.ttl'),
                                    prov_file)
                    prov_files.append(prov_file)
                    # Embed DICOM metadata into the NIfTI via a Function node
                    embedfunc = Node(Function(input_names=['dcmfiles',
                                                           'niftifile',
                                                           'infofile',
                                                           'force'],
                                              output_names=['outfile', 'meta'],
                                              function=embed_nifti),
                                     name='embedder')
                    embedfunc.inputs.dcmfiles = item[2]
                    embedfunc.inputs.niftifile = outname
                    embedfunc.inputs.infofile = scaninfo
                    embedfunc.inputs.force = True
                    embedfunc.base_dir = tmpdir
                    res = embedfunc.run()
                    g = res.provenance.rdf()
                    g.parse(prov_file, format='turtle')
                    g.serialize(prov_file, format='turtle')
                    os.chmod(outname, 0o440)
                    os.chmod(scaninfo, 0o440)
                    os.chmod(prov_file, 0o440)
    shutil.rmtree(tmpdir)

def run_examples(example, pipelines, data_path, plugin=None):
    '''
    Run example workflows
    '''

    # Import packages
    from nipype import config
    from nipype.interfaces.base import CommandLine
    from nipype.utils import draw_gantt_chart
    from nipype.pipeline.plugins import log_nodes_cb

    if plugin is None:
        plugin = 'MultiProc'

    print('running example: %s with plugin: %s' % (example, plugin))
    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == 'MultiProc':
        plugin_args['n_procs'] = cpu_count()

    __import__(example)
    for pipeline in pipelines:
        # Init and run workflow
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)
        if os.path.exists(wf.base_dir):
            rmtree(wf.base_dir)

        # Handle a logging directory
        log_dir = os.path.join(os.getcwd(), 'logs', example)
        if os.path.exists(log_dir):
            rmtree(log_dir)
        os.makedirs(log_dir)

        wf.config = {
            'execution': {
                'hash_method': 'timestamp',
                'stop_on_first_rerun': 'true',
                'write_provenance': 'true'
            }
        }

        # Callback log setup
        if example == 'fmri_spm_nested' and plugin == 'MultiProc' and \
           pipeline == 'l2pipeline':
            # Init callback log
            import logging
            cb_log_path = os.path.join(os.path.expanduser('~'), 'callback.log')
            cb_logger = logging.getLogger('callback')
            cb_logger.setLevel(logging.DEBUG)
            handler = logging.FileHandler(cb_log_path)
            cb_logger.addHandler(handler)
            plugin_args = {'n_procs': 4, 'status_callback': log_nodes_cb}
        else:
            plugin_args = {'n_procs': 4}

        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)

        # Draw gantt chart only if pandas is installed
        try:
            import pandas
            pandas_flg = True
        except ImportError:
            pandas_flg = False

        if 'status_callback' in plugin_args and pandas_flg:
            draw_gantt_chart.generate_gantt_chart(cb_log_path, 4)
            dst_log_html = os.path.join(os.path.expanduser('~'),
                                        'callback.log.html')
            copyfile(cb_log_path + '.html', dst_log_html)

#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
=============================================
fMRI: OpenfMRI.org data, FSL, ANTS, c3daffine
=============================================

A growing number of datasets are available on `OpenfMRI
<http://openfmri.org>`_. This script demonstrates how to use nipype to
analyze a data set::

    python fmri_ants_openfmri.py --datasetdir ds107
"""

from nipype import config
config.enable_provenance()

from nipype.external import six
from glob import glob
import os

import nipype.pipeline.engine as pe
import nipype.algorithms.modelgen as model
import nipype.algorithms.rapidart as ra
import nipype.interfaces.fsl as fsl
import nipype.interfaces.ants as ants
from nipype.algorithms.misc import TSNR
from nipype.interfaces.c3 import C3dAffineTool
import nipype.interfaces.io as nio
import nipype.interfaces.utility as niu

def run_examples(example, pipelines, data_path, plugin=None):
    '''
    Run example workflows
    '''

    # Import packages
    from nipype import config
    from nipype.interfaces.base import CommandLine
    from nipype.utils import draw_gantt_chart
    from nipype.pipeline.plugins import log_nodes_cb

    if plugin is None:
        plugin = 'MultiProc'

    print('running example: %s with plugin: %s' % (example, plugin))
    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == 'MultiProc':
        plugin_args['n_procs'] = cpu_count()

    __import__(example)
    for pipeline in pipelines:
        # Init and run workflow
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)
        if os.path.exists(wf.base_dir):
            rmtree(wf.base_dir)

        # Handle a logging directory
        log_dir = os.path.join(os.getcwd(), 'logs', example)
        if os.path.exists(log_dir):
            rmtree(log_dir)
        os.makedirs(log_dir)

        wf.config = {'execution': {'hash_method': 'timestamp',
                                   'stop_on_first_rerun': 'true',
                                   'write_provenance': 'true'}}

        # Callback log setup
        if example == 'fmri_spm_nested' and plugin == 'MultiProc' and \
           pipeline == 'l2pipeline':
            # Init callback log
            import logging
            cb_log_path = os.path.join(os.path.expanduser('~'), 'callback.log')
            cb_logger = logging.getLogger('callback')
            cb_logger.setLevel(logging.DEBUG)
            handler = logging.FileHandler(cb_log_path)
            cb_logger.addHandler(handler)
            plugin_args = {'n_procs': 4, 'status_callback': log_nodes_cb}
        else:
            plugin_args = {'n_procs': 4}

        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)

        # Draw gantt chart only if pandas is installed
        try:
            import pandas
            pandas_flg = True
        except ImportError:
            pandas_flg = False

        if 'status_callback' in plugin_args and pandas_flg:
            draw_gantt_chart.generate_gantt_chart(cb_log_path, 4)
            dst_log_html = os.path.join(os.path.expanduser('~'),
                                        'callback.log.html')
            copyfile(cb_log_path + '.html', dst_log_html)

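# A hypothetical invocation of the run_examples variants above. The example
# and pipeline names ('fmri_spm_nested', 'l2pipeline') come from the snippets
# themselves; the data path is an assumption.
run_examples('fmri_spm_nested', ['l2pipeline'],
             data_path='/data/ds107', plugin='MultiProc')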