def run_examples(example, pipelines, data_path, plugin=None, rm_base_dir=True):
    """Import *example* and execute each workflow listed in *pipelines*.

    Every workflow is run twice: the second pass verifies that nothing is
    needlessly re-executed (timestamp hashing + stop_on_first_rerun).

    Relies on module-level ``os``, ``sys``, ``rmtree`` and ``cpu_count``.
    """
    from nipype import config
    from nipype.interfaces.base import CommandLine

    if plugin is None:
        plugin = 'MultiProc'
    print('running example: %s with plugin: %s' % (example, plugin))

    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == 'MultiProc':
        # Allow CI to cap parallelism via the environment.
        plugin_args['n_procs'] = int(
            os.getenv('NIPYPE_NUMBER_OF_CPUS', cpu_count()))

    __import__(example)
    example_module = sys.modules[example]

    for wf_name in pipelines:
        wf = getattr(example_module, wf_name)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)

        results_dir = os.path.join(wf.base_dir, wf.name)
        if rm_base_dir and os.path.exists(results_dir):
            rmtree(results_dir)

        # Keep per-example logs under ./logs/<example>
        log_dir = os.path.join(os.getcwd(), 'logs', example)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        wf.config = {
            'execution': {
                'hash_method': 'timestamp',
                'stop_on_first_rerun': 'true',
                'write_provenance': 'true',
                'poll_sleep_duration': 2
            },
            'logging': {
                'log_directory': log_dir,
                'log_to_file': True
            }
        }

        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)
def run_examples(example, pipelines, data_path, plugin=None, rm_base_dir=True):
    """Run the workflows named in *pipelines* from the niflow example *example*.

    The example module is loaded relative to ``niflow.nipype1.examples``.
    Each workflow executes twice so the rerun pass can confirm caching works.

    Relies on module-level ``os``, ``rmtree``, ``cpu_count`` and
    ``import_module``.
    """
    from nipype import config
    from nipype.interfaces.base import CommandLine

    if plugin is None:
        plugin = 'MultiProc'
    print('running example: %s with plugin: %s' % (example, plugin))

    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == 'MultiProc':
        # Parallelism is overridable through the environment.
        plugin_args['n_procs'] = int(
            os.getenv('NIPYPE_NUMBER_OF_CPUS', cpu_count()))

    module = import_module('.' + example, 'niflow.nipype1.examples')

    for wf_name in pipelines:
        wf = getattr(module, wf_name)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)

        results_dir = os.path.join(wf.base_dir, wf.name)
        if rm_base_dir and os.path.exists(results_dir):
            rmtree(results_dir)

        # Handle a logging directory
        log_dir = os.path.join(os.getcwd(), 'logs', example)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        wf.config = {
            'execution': {
                'hash_method': 'timestamp',
                'stop_on_first_rerun': 'true',
                'write_provenance': 'true',
                'poll_sleep_duration': 2
            },
            'logging': {
                'log_directory': log_dir,
                'log_to_file': True
            }
        }

        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)
def run_examples(example, pipelines, plugin):
    """Run each workflow in *pipelines* from module *example* under *plugin*.

    Each workflow is run twice so the second pass can verify that nothing
    is needlessly re-executed.

    Relies on module-level ``os``, ``sys`` and ``rmtree``.
    """
    # FIX: the original used a Python 2 ``print`` statement, which is a
    # SyntaxError on Python 3 (every other variant in this file uses the
    # print() function).
    print('running example: %s with plugin: %s' % (example, plugin))
    from nipype import config
    config.enable_debug_mode()
    from nipype.interfaces.base import CommandLine
    CommandLine.set_default_terminal_output("stream")

    __import__(example)
    for pipeline in pipelines:
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)
        if os.path.exists(wf.base_dir):
            rmtree(wf.base_dir)
        wf.config = {'execution': {'hash_method': 'timestamp',
                                   'stop_on_first_rerun': 'true'}}
        wf.run(plugin=plugin, plugin_args={'n_procs': 4})
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)
def run_examples(example, pipelines, data_path, plugin=None, rm_base_dir=True):
    """Execute every workflow in *pipelines* from the example module *example*.

    Runs each workflow twice; the rerun checks that nothing re-executes.

    Relies on module-level ``os``, ``sys``, ``rmtree`` and ``cpu_count``.
    """
    from nipype import config
    from nipype.interfaces.base import CommandLine

    if plugin is None:
        plugin = "MultiProc"
    print("running example: %s with plugin: %s" % (example, plugin))

    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == "MultiProc":
        # CPU count may be capped via the environment for CI runs.
        plugin_args["n_procs"] = int(os.getenv("NIPYPE_NUMBER_OF_CPUS", cpu_count()))

    __import__(example)
    loaded = sys.modules[example]

    for wf_name in pipelines:
        wf = getattr(loaded, wf_name)
        wf.base_dir = os.path.join(os.getcwd(), "output", example, plugin)

        results_dir = os.path.join(wf.base_dir, wf.name)
        if rm_base_dir and os.path.exists(results_dir):
            rmtree(results_dir)

        # Keep logs per example under ./logs/<example>
        log_dir = os.path.join(os.getcwd(), "logs", example)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        wf.config = {
            "execution": {
                "hash_method": "timestamp",
                "stop_on_first_rerun": "true",
                "write_provenance": "true",
                "poll_sleep_duration": 2,
            },
            "logging": {"log_directory": log_dir, "log_to_file": True},
        }

        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)
def run_examples(example, pipelines, plugin):
    """Run the named workflows of example module *example* with *plugin*.

    The output tree for each workflow is wiped first; each workflow is run
    twice so the second pass can confirm nothing is re-executed.

    Relies on module-level ``os``, ``sys`` and ``rmtree``.
    """
    print('running example: %s with plugin: %s' % (example, plugin))
    from nipype import config
    config.enable_debug_mode()
    from nipype.interfaces.base import CommandLine
    CommandLine.set_default_terminal_output("stream")

    __import__(example)
    mod = sys.modules[example]

    for wf_name in pipelines:
        wf = getattr(mod, wf_name)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)
        if os.path.exists(wf.base_dir):
            rmtree(wf.base_dir)

        wf.config = {
            'execution': {
                'hash_method': 'timestamp',
                'stop_on_first_rerun': 'true',
            }
        }
        wf.run(plugin=plugin, plugin_args={'n_procs': 4})
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)
def run_examples(example, pipelines, data_path, plugin=None):
    """Run every workflow in *pipelines* from example module *example*.

    Each workflow runs twice so the second pass verifies nothing reruns.

    Relies on module-level ``os``, ``sys``, ``rmtree`` and ``cpu_count``.
    """
    from nipype import config
    from nipype.interfaces.base import CommandLine

    if plugin is None:
        plugin = 'MultiProc'
    print('running example: %s with plugin: %s' % (example, plugin))

    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == 'MultiProc':
        # CONSISTENCY FIX: the sibling variants honor NIPYPE_NUMBER_OF_CPUS
        # so CI can cap parallelism; default remains cpu_count().
        plugin_args['n_procs'] = int(
            os.getenv('NIPYPE_NUMBER_OF_CPUS', cpu_count()))

    __import__(example)
    for pipeline in pipelines:
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)
        if os.path.exists(wf.base_dir):
            rmtree(wf.base_dir)

        # Handle a logging directory (recreated fresh for each run)
        log_dir = os.path.join(os.getcwd(), 'logs', example)
        if os.path.exists(log_dir):
            rmtree(log_dir)
        os.makedirs(log_dir)

        wf.config = {'execution': {'hash_method': 'timestamp',
                                   'stop_on_first_rerun': 'true',
                                   'write_provenance': 'true'},
                     'logging': {'log_directory': log_dir,
                                 'log_to_file': True}}
        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)
http://mindboggle.info/data.html specifically the 2mm versions of: - `Joint Fusion Atlas <http://mindboggle.info/data/atlases/jointfusion/OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_2mm_v2.nii.gz>`_ - `MNI template <http://mindboggle.info/data/templates/ants/OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz>`_ """ from __future__ import division, unicode_literals from builtins import open, range, str import os from nipype.interfaces.base import CommandLine CommandLine.set_default_terminal_output('allatonce') # https://github.com/moloney/dcmstack from dcmstack.extract import default_extractor # pip install pydicom from dicom import read_file from nipype.interfaces import (fsl, Function, ants, freesurfer, nipy) from nipype.interfaces.c3 import C3dAffineTool fsl.FSLCommand.set_default_output_type('NIFTI_GZ') from nipype import Workflow, Node, MapNode from nipype.algorithms.rapidart import ArtifactDetect from nipype.algorithms.misc import TSNR
def run_examples(example, pipelines, data_path, plugin=None):
    '''
    Run example workflows.

    Each workflow in *pipelines* from module *example* is executed; for the
    special ``fmri_spm_nested``/``l2pipeline`` MultiProc combination a node
    callback log is captured and (if pandas is available) rendered as a
    Gantt chart copied next to ``~/callback.log``.

    Relies on module-level ``os``, ``sys``, ``rmtree``, ``cpu_count``
    and ``copyfile``.
    '''
    # Import packages
    from nipype import config
    from nipype.interfaces.base import CommandLine
    from nipype.utils import draw_gantt_chart
    from nipype.pipeline.plugins import log_nodes_cb

    if plugin is None:
        plugin = 'MultiProc'
    print('running example: %s with plugin: %s' % (example, plugin))

    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == 'MultiProc':
        plugin_args['n_procs'] = cpu_count()

    __import__(example)
    for pipeline in pipelines:
        # Init and run workflow
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)
        if os.path.exists(wf.base_dir):
            rmtree(wf.base_dir)

        # Handle a logging directory
        log_dir = os.path.join(os.getcwd(), 'logs', example)
        if os.path.exists(log_dir):
            rmtree(log_dir)
        os.makedirs(log_dir)

        wf.config = {
            'execution': {
                'hash_method': 'timestamp',
                'stop_on_first_rerun': 'true',
                'write_provenance': 'true'
            }
        }

        # Callback log setup
        if example == 'fmri_spm_nested' and plugin == 'MultiProc' and \
                pipeline == 'l2pipeline':
            # Init callback log
            import logging
            cb_log_path = os.path.join(os.path.expanduser('~'), 'callback.log')
            cb_logger = logging.getLogger('callback')
            cb_logger.setLevel(logging.DEBUG)
            handler = logging.FileHandler(cb_log_path)
            cb_logger.addHandler(handler)
            plugin_args = {'n_procs': 4, 'status_callback': log_nodes_cb}
        else:
            plugin_args = {'n_procs': 4}

        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)

        # Draw gantt chart only if pandas is installed
        try:
            import pandas  # noqa: F401
            pandas_flg = True
        except ImportError:
            pandas_flg = False

        # FIX: dict.has_key() was removed in Python 3 -- use ``in`` instead.
        if 'status_callback' in plugin_args and pandas_flg:
            draw_gantt_chart.generate_gantt_chart(cb_log_path, 4)
            dst_log_html = os.path.join(os.path.expanduser('~'),
                                        'callback.log.html')
            copyfile(cb_log_path + '.html', dst_log_html)
http://mindboggle.info/data.html specifically the 2mm versions of: - `Joint Fusion Atlas <http://mindboggle.info/data/atlases/jointfusion/OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_2mm_v2.nii.gz>`_ - `MNI template <http://mindboggle.info/data/templates/ants/OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz>`_ """ from __future__ import division, unicode_literals from builtins import open, range, str import os from nipype.interfaces.base import CommandLine CommandLine.set_default_terminal_output('allatonce') from dicom import read_file from nipype.interfaces import (spm, fsl, Function, ants, freesurfer) from nipype.interfaces.c3 import C3dAffineTool fsl.FSLCommand.set_default_output_type('NIFTI') from nipype import Workflow, Node, MapNode from nipype.interfaces import matlab as mlab mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodisplay") # If SPM is not in your MATLAB path you should add it here # mlab.MatlabCommand.set_default_paths('/software/matlab/spm12')
# The script is largely based on nipype's dmri_mrtrix_dti.py, copied from
# http://nipy.sourceforge.net/nipype/users/examples/dmri_mrtrix_dti.html
import nipype.interfaces.io as nio  # Data i/o
import nipype.interfaces.utility as util  # utility
import nipype.pipeline.engine as pe  # pypeline engine
import nipype.interfaces.mrtrix as mrtrix  # <---- The important new part!
import nipype.interfaces.fsl as fsl
import nipype.algorithms.misc as misc
import os

from dipy.tracking.utils import move_streamlines
global move_streamlines  # NOTE: no-op at module scope; kept for fidelity

# Stream no terminal output from command-line interfaces.
from nipype.interfaces.base import CommandLine
CommandLine.set_default_terminal_output('none')

fsl.FSLCommand.set_default_output_type('NIFTI')

# Project layout and the subject(s) to process.
project_dir = os.path.abspath('/om/user/ksitek/exvivo/')
data_dir = os.path.join(project_dir, 'data/')
out_dir = os.path.join(project_dir, 'mrtrix')

subject_list = ['Reg_S64550']
The 2mm version was generated with:: >>> from nipype import freesurfer as fs >>> rs = fs.Resample() >>> rs.inputs.in_file = 'OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152.nii.gz' >>> rs.inputs.resampled_file = 'OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_2mm.nii.gz' >>> rs.inputs.voxel_size = (2., 2., 2.) >>> rs.inputs.args = '-rt nearest -ns 1' >>> res = rs.run() """ import os from nipype.interfaces.base import CommandLine CommandLine.set_default_terminal_output('file') from nipype import config config.enable_provenance() from nipype import (ants, afni, fsl, freesurfer, nipy, Function, DataSink) from nipype import Workflow, Node, MapNode from nipype.algorithms.rapidart import ArtifactDetect from nipype.algorithms.misc import TSNR from nipype.interfaces.fsl import EPIDeWarp from nipype.interfaces.io import FreeSurferSource from nipype.interfaces.c3 import C3dAffineTool from nipype.interfaces.utility import Merge, IdentityInterface from nipype.utils.filemanip import filename_to_list
from builtins import open, range, str import os from nipype.interfaces import (spm, fsl, Function, ants, freesurfer) from nipype.interfaces.c3 import C3dAffineTool fsl.FSLCommand.set_default_output_type('NIFTI') from nipype import Workflow, Node, MapNode from nipype.interfaces import matlab as mlab mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodisplay") from nipype.interfaces.base import CommandLine CommandLine.set_default_terminal_output("allatonce") from nipype.algorithms.rapidart import ArtifactDetect from nipype.algorithms.misc import TSNR from nipype.interfaces.utility import Rename, Merge, IdentityInterface from nipype.utils.filemanip import filename_to_list from nipype.interfaces.io import DataSink, FreeSurferSource import numpy as np import scipy as sp import nibabel as nb imports = [ 'import os', 'import nibabel as nb', 'import numpy as np', 'import scipy as sp', 'from nipype.utils.filemanip import filename_to_list, list_to_filename, split_filename', 'from scipy.special import legendre'
available from: http://mindboggle.info/data.html specifically the 2mm versions of: - `Joint Fusion Atlas <http://mindboggle.info/data/atlases/jointfusion/OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_2mm_v2.nii.gz>`_ - `MNI template <http://mindboggle.info/data/templates/ants/OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz>`_ """ import os from nipype.interfaces.base import CommandLine CommandLine.set_default_terminal_output("allatonce") from dcmstack.extract import default_extractor from dicom import read_file from nipype.interfaces import fsl, Function, ants, freesurfer, nipy from nipype.interfaces.c3 import C3dAffineTool fsl.FSLCommand.set_default_output_type("NIFTI_GZ") from nipype import Workflow, Node, MapNode from nipype.algorithms.rapidart import ArtifactDetect from nipype.algorithms.misc import TSNR from nipype.interfaces.utility import Rename, Merge, IdentityInterface from nipype.utils.filemanip import filename_to_list
def run_examples(example, pipelines, data_path, plugin=None):
    '''
    Run example workflows.

    Executes each workflow in *pipelines* from module *example*. For the
    ``fmri_spm_nested``/``l2pipeline`` MultiProc case a callback log is
    written to ``~/callback.log`` and, when pandas is installed, turned
    into a Gantt chart HTML copied alongside it.

    Relies on module-level ``os``, ``sys``, ``rmtree``, ``cpu_count``
    and ``copyfile``.
    '''
    # Import packages
    from nipype import config
    from nipype.interfaces.base import CommandLine
    from nipype.utils import draw_gantt_chart
    from nipype.pipeline.plugins import log_nodes_cb

    if plugin is None:
        plugin = 'MultiProc'
    print('running example: %s with plugin: %s' % (example, plugin))

    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == 'MultiProc':
        plugin_args['n_procs'] = cpu_count()

    __import__(example)
    for pipeline in pipelines:
        # Init and run workflow
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)
        if os.path.exists(wf.base_dir):
            rmtree(wf.base_dir)

        # Handle a logging directory
        log_dir = os.path.join(os.getcwd(), 'logs', example)
        if os.path.exists(log_dir):
            rmtree(log_dir)
        os.makedirs(log_dir)

        wf.config = {'execution': {'hash_method': 'timestamp',
                                   'stop_on_first_rerun': 'true',
                                   'write_provenance': 'true'}}

        # Callback log setup
        if example == 'fmri_spm_nested' and plugin == 'MultiProc' and \
                pipeline == 'l2pipeline':
            # Init callback log
            import logging
            cb_log_path = os.path.join(os.path.expanduser('~'), 'callback.log')
            cb_logger = logging.getLogger('callback')
            cb_logger.setLevel(logging.DEBUG)
            handler = logging.FileHandler(cb_log_path)
            cb_logger.addHandler(handler)
            plugin_args = {'n_procs': 4, 'status_callback': log_nodes_cb}
        else:
            plugin_args = {'n_procs': 4}

        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)

        # Draw gantt chart only if pandas is installed
        try:
            import pandas  # noqa: F401
            pandas_flg = True
        except ImportError:
            pandas_flg = False

        # FIX: dict.has_key() does not exist in Python 3 -- use ``in``.
        if 'status_callback' in plugin_args and pandas_flg:
            draw_gantt_chart.generate_gantt_chart(cb_log_path, 4)
            dst_log_html = os.path.join(os.path.expanduser('~'),
                                        'callback.log.html')
            copyfile(cb_log_path + '.html', dst_log_html)
>>> from nipype import freesurfer as fs >>> rs = fs.Resample() >>> rs.inputs.in_file = 'OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152.nii.gz' >>> rs.inputs.resampled_file = 'OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_2mm.nii.gz' >>> rs.inputs.voxel_size = (2., 2., 2.) >>> rs.inputs.args = '-rt nearest -ns 1' >>> res = rs.run() """ import os from nipype.interfaces.base import CommandLine CommandLine.set_default_terminal_output("file") from nipype import config config.enable_provenance() from nipype import ants, afni, fsl, freesurfer, nipy, Function, DataSink from nipype import Workflow, Node, MapNode from nipype.algorithms.rapidart import ArtifactDetect from nipype.algorithms.misc import TSNR from nipype.interfaces.fsl import EPIDeWarp from nipype.interfaces.io import FreeSurferSource from nipype.interfaces.c3 import C3dAffineTool from nipype.interfaces.utility import Merge, IdentityInterface from nipype.utils.filemanip import filename_to_list