def test_simple_run(self):
    """ Method to test a simple 1 cpu call with the scheduler.
    """
    # Configure the environment
    study_config = StudyConfig(
        modules=[],
        use_smart_caching=True,
        output_directory=self.outdir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    # Create pipeline
    pipeline = get_process_instance(self.pipeline_name)
    pipeline.date_in_filename = True

    # Set pipeline input parameters
    dicom_dataset = get_sample_data("dicom")
    dcmfolder = os.path.join(self.outdir, "dicom")
    if not os.path.isdir(dcmfolder):
        os.makedirs(dcmfolder)
    shutil.copy(dicom_dataset.barre, os.path.join(dcmfolder, "heart.dcm"))
    pipeline.source_dir = dcmfolder

    # View pipeline
    if 0:
        import sys
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui

        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    # Execute the pipeline in the configured study
    study_config.run(pipeline)
def run_pipeline_io(self, filename):
    pipeline = MyPipeline()
    from capsul.pipeline import pipeline_tools
    pipeline_tools.save_pipeline(pipeline, filename)
    pipeline2 = get_process_instance(filename)
    pipeline2.workflow_ordered_nodes()

    if self.debug:
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView
        import sys

        app = QtGui.QApplication.instance()
        if not app:
            app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(
            pipeline, allow_open_controller=True, enable_edition=True,
            show_sub_pipelines=True)
        view2 = PipelineDevelopperView(
            pipeline2, allow_open_controller=True, enable_edition=True,
            show_sub_pipelines=True)
        view1.show()
        view2.show()
        app.exec_()

    self.assertTrue(
        pipeline2.workflow_repr in
            ("constant->node1->node2", "node1->constant->node2"),
        '%s not in ("constant->node1->node2", "node1->constant->node2")'
        % pipeline2.workflow_repr)
    d1 = pipeline_tools.dump_pipeline_state_as_dict(pipeline)
    d2 = pipeline_tools.dump_pipeline_state_as_dict(pipeline2)
    self.assertEqual(d1, d2)
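# For context, a minimal sketch of what the MyPipeline used above could look
# like. This is an assumption for illustration (the real test pipeline is
# defined elsewhere in the test module); the process module paths and
# parameter names are hypothetical.
from capsul.api import Pipeline

class MyPipelineSketch(Pipeline):
    def pipeline_definition(self):
        # Two chained processes plus a "constant" node feeding node2,
        # matching the expected workflow_repr values checked above.
        self.add_process("constant", "mymodule.ConstantProcess")
        self.add_process("node1", "mymodule.DummyProcess")
        self.add_process("node2", "mymodule.DummyProcess")
        self.add_link("node1.output->node2.input")
        self.add_link("constant.output->node2.other_input")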
def run_spm_preprocessing(funcfile, outdir, repetition_time, ref_slice,
                          slice_order, template, timings_corr_algo,
                          normalization, spm_bin, fsl_config,
                          enable_display=False):
    """ Execute the SPM fMRI preprocessing pipeline in a configured study.
    """
    print("Study_config init...")
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config=fsl_config,
        use_fsl=True,
        use_matlab=False,
        use_spm=True,
        spm_exec=spm_bin,
        spm_standalone=True,
        use_nipype=True,
        output_directory=outdir)
    print("... done.")

    # Processing definition: create the <clinfmri.preproc.FmriPreproc>
    # pipeline that defines the different steps of the processing.
    pipeline = get_process_instance(
        "clinfmri.preproc.converted_fmri_preproc.xml")

    # It is possible to display the pipeline.
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    # Now parametrize the pipeline.
    pipeline.fmri_file = funcfile
    pipeline.realign_register_to_mean = True
    pipeline.select_slicer = timings_corr_algo
    pipeline.select_normalization = normalization
    pipeline.force_repetition_time = repetition_time
    pipeline.force_slice_orders = slice_order
    pipeline.realign_wrap = [0, 1, 0]
    pipeline.realign_write_wrap = [0, 1, 0]
    pipeline.ref_slice = ref_slice
    if template is not None:
        pipeline.template_file = template

    # The pipeline is now ready to be executed.
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)
def pilot_gdti_estimation():
    """
    Generalized diffusion tensor estimation
    =======================================
    """
    # System import
    import os
    import sys
    import datetime
    import PySide.QtGui as QtGui

    # CAPSUL import
    from capsul.qt_gui.widgets import PipelineDevelopperView
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists
    on the file system:
    """
    working_dir = "/volatile/nsap/clindmri/gdti"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already
    been executed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig"],
        use_smart_caching=True,
        output_directory=working_dir)

    # Create pipeline
    start_time = datetime.datetime.now()
    print("Start Pipeline Creation", start_time)
    pipeline = get_process_instance("clindmri.estimation.gdti.xml")
    print("Done in {0} seconds.".format(datetime.datetime.now() - start_time))

    # View pipeline
    if 0:
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1

    # Set pipeline input parameters
    pipeline.dfile = ("/volatile/imagen/dmritest/000000022453/DTI/"
                      "000000022453s011a1001.nii.gz")
    pipeline.bvalfile = ("/volatile/imagen/dmritest/000000022453/DTI/"
                         "000000022453s011a1001.bval")
    pipeline.bvecfile = ("/volatile/imagen/dmritest/000000022453/DTI/"
                         "000000022453s011a1001.bvec")
    pipeline.order = 2
    pipeline.odf = False
    print("Done in {0} seconds.".format(datetime.datetime.now() - start_time))

    # Execute the pipeline in the configured study
    study_config.run(pipeline, verbose=1)
class ActivationInspectorApp(Application):
    """ ActivationInspector application.

    While developing a pipeline, nodes are connected through links. Nodes
    are automatically activated or disabled depending on their connections
    (a mandatory link to a disabled node will disable the current one).
    You will often wonder why a node is not activated. This tool helps to
    find out when and why, by replaying the activation rules sequence step
    by step and displaying which nodes activate or deactivate at each step.
    """
    # Load some meta information
    from capsul.info import __version__ as _version
    from capsul.info import NAME as _application_name
    from capsul.info import ORGANISATION as _organisation_name

    def __init__(self, pipeline_path, record_file=None, *args, **kwargs):
        """ Initialize the ActivationInspectorApp class.

        Parameters
        ----------
        pipeline_path: str (mandatory)
            the name of the pipeline we want to load.
        record_file: str (optional)
            a file where the pipeline activation steps are stored.
        """
        # Inheritance
        super(ActivationInspectorApp, self).__init__(*args, **kwargs)

        # Load the pipeline
        self.pipeline = get_process_instance(pipeline_path)

        # Initialize the application
        self.record_file = record_file
        self.window = None
        self.init_window()

    def init_window(self):
        """ Initialize the main window.
        """
        # First set some meta information
        self.setApplicationName(self._application_name)
        self.setOrganizationName(self._organisation_name)
        self.setApplicationVersion(self._version)

        # Get the user interface description from capsul resources
        ui_file = os.path.join(
            os.path.dirname(__file__), "activation_inspector.ui")
        #ui_file = os.path.join(resources.__path__[0],
        #                       "activation_inspector.ui")

        # Create and show the activation/pipeline/controller windows
        self.pipeline_window = PipelineDevelopperView(
            self.pipeline, show_sub_pipelines=True)
        self.controller_window = ScrollControllerWidget(
            self.pipeline, live=True)
        self.activation_window = ActivationInspector(
            self.pipeline, ui_file, self.record_file,
            developper_view=self.pipeline_window)
        self.pipeline_window.show()
        self.activation_window.show()
        self.controller_window.show()

        return True
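# A hedged usage sketch (this entry point is an assumption, not taken from
# the source): the class above suggests the inspector can be launched on any
# pipeline description, with an optional activation record file. It assumes
# Application is a QApplication subclass exposing exec_().
if __name__ == "__main__":
    import sys
    # "mypackage.my_pipeline.xml" and the record file path are hypothetical
    # illustration values.
    app = ActivationInspectorApp("mypackage.my_pipeline.xml",
                                 record_file="/tmp/activation_steps.txt")
    sys.exit(app.exec_())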
def pilot_bet(enable_display=False):
    """
    BET
    ===

    Brain extraction with FSL.

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.process import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists
    on the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/fsl_bet"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

    * fmri: the functional volume.
    * anat: the structural volume.
    * TR: the repetition time.

    Processing definition
    ---------------------

    First create the :ref:`BET pipeline <clinfmri.preproc.FslBet>` that
    defines the different steps of the processing:
    """
    pipeline = get_process_instance("clinfmri.utils.fsl_bet.xml")
    print(pipeline.get_input_spec())

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.input_image_file = toy_dataset.anat
    pipeline.generate_binary_mask = True
    pipeline.bet_threshold = 0.5

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
def pilot_new_segment(enable_display=False):
    """
    New Segment
    ===========

    Unified SPM segmentation: segments, bias corrects and spatially
    normalises.

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.process import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists
    on the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/spm_newsegment"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

    * fmri: the functional volume.
    * anat: the structural volume.
    * TR: the repetition time.

    Processing definition
    ---------------------

    First create the :ref:`new segment pipeline
    <clinfmri.utils.SpmNewSegment>` that defines the different steps of the
    processing:
    """
    pipeline = get_process_instance("clinfmri.utils.spm_new_segment.xml")
    print(pipeline.get_input_spec())

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.channel_files = [toy_dataset.mean]
    pipeline.reference_volume = template_dataset.brain

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
def pilot_qa_fmri():
    """
    Imports
    -------

    This code needs the 'capsul' and 'mmutils' packages in order to
    instantiate and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or on pypi.
    """
    # System import
    import os
    import sys
    import json
    import tempfile

    # Capsul import
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    # Mmutils import
    from mmutils.toy_datasets import get_sample_data

    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the quality assurance, and 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """
    pipeline_name = "mmqa.fmri.fmri_quality_assurance_bbox.xml"
    outdir = tempfile.mkdtemp()

    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attribute, and to request a log of the processing steps
    through 'generate_logging'. 'use_scheduler' must be set to True if more
    than 1 CPU is used.
    """
    study_config = StudyConfig(
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True,
        output_directory=outdir)

    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a functional image that is downloaded if
    necessary through the 'get_sample_data' function and exported locally.
    """
    localizer_dataset = get_sample_data("localizer_extra")

    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: here we set the functional image, the repetition time,
    the size of the region of interest and the output score file.
    """
    pipeline = get_process_instance(pipeline_name)
    pipeline.image_file = localizer_dataset.fmri
    pipeline.repetition_time = 2.0
    pipeline.exclude_volume = []
    pipeline.roi_size = 21
    pipeline.score_file = os.path.join(outdir, "scores.json")

    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui

        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed in the defined 'study_config'.
    """
    study_config.run(pipeline)

    """
    Access the result
    -----------------

    Display the computed scores.
    """
    scores_file = pipeline.scores_file
    with open(scores_file, "r") as _file:
        scores = json.load(_file)
    for key, value in scores.items():
        print("{0} = {1}".format(key, value))
def pilot_fsl_preproc():
    """
    FSL preprocessings
    ==================
    """
    # System import
    import os
    import sys
    import datetime
    import PySide.QtGui as QtGui

    # CAPSUL import
    from capsul.qt_gui.widgets import PipelineDevelopperView
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists
    on the file system:
    """
    working_dir = "/volatile/nsap/clindmri/fslpreproc"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already
    been executed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "FSLConfig", "MatlabConfig",
                 "SPMConfig", "NipypeConfig"],
        use_smart_caching=True,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        output_directory=working_dir)

    # Create pipeline
    start_time = datetime.datetime.now()
    print("Start Pipeline Creation", start_time)
    pipeline = get_process_instance("clindmri.preproc.fsl_preproc.xml")
    print("Done in {0} seconds.".format(datetime.datetime.now() - start_time))

    # View pipeline
    if 0:
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1

    # Set pipeline input parameters
    pipeline.dfile = ("/volatile/imagen/dmritest/000000022453/DTI/"
                      "000000022453s011a1001.nii.gz")
    pipeline.bvalfile = ("/volatile/imagen/dmritest/000000022453/DTI/"
                         "000000022453s011a1001.bval")
    pipeline.bvecfile = ("/volatile/imagen/dmritest/000000022453/DTI/"
                         "000000022453s011a1001.bvec")
    print("Done in {0} seconds.".format(datetime.datetime.now() - start_time))

    #print(pipeline.nodes["eddy"].process._nipype_interface.inputs)
    print(pipeline.nodes["eddy"].process._nipype_interface.cmdline)

    # Execute the pipeline in the configured study
    study_config.run(pipeline, verbose=1)
        self.assertFalse(self.pipeline.nodes["way22"].activated)
        self.pipeline.workflow_ordered_nodes()
        self.assertEqual(self.pipeline.workflow_repr, "way11->way12")


def test():
    """ Function to execute unittest
    """
    suite = unittest.TestLoader().loadTestsFromTestCase(TestPipeline)
    runtime = unittest.TextTestRunner(verbosity=2).run(suite)
    return runtime.wasSuccessful()


if __name__ == "__main__":
    print("RETURNCODE: ", test())

    if 1:
        import sys
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication.instance()
        if not app:
            app = QtGui.QApplication(sys.argv)
        pipeline = MyPipeline()
        setattr(pipeline.nodes_activation, "way11", False)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1
def pilot_dcm2nii():
    """
    Imports
    -------

    This code needs the 'capsul' and 'mmutils' packages in order to
    instantiate and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or on pypi.
    """
    import os
    import sys
    import shutil
    import tempfile
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance
    from mmutils.toy_datasets import get_sample_data

    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the DICOMs conversion, and 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """
    pipeline_name = "dcmio.dcmconverter.dcm_to_nii.xml"
    outdir = tempfile.mkdtemp()

    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attribute, and to request a log of the processing steps
    through 'generate_logging'. 'use_scheduler' must be set to True if more
    than 1 CPU is used.
    """
    study_config = StudyConfig(
        modules=[],
        output_directory=outdir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a 3D heart dicom image that is downloaded
    if necessary through the 'get_sample_data' function and exported locally
    in a 'heart.dcm' file.
    """
    dicom_dataset = get_sample_data("dicom")
    dcmfolder = os.path.join(outdir, "dicom")
    if not os.path.isdir(dcmfolder):
        os.makedirs(dcmfolder)
    shutil.copy(dicom_dataset.barre, os.path.join(dcmfolder, "heart.dcm"))

    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we decided to set the date in the
    converted file name and we set two DICOM directories to be converted
    to Nifti format.
    """
    pipeline = get_process_instance(pipeline_name)
    pipeline.date_in_filename = True
    pipeline.dicom_directories = [dcmfolder, dcmfolder]
    pipeline.additional_informations = [
        [("Provided by", "Neurospin@2015")],
        [("Provided by", "Neurospin@2015"), ("TR", "1500")]]
    pipeline.dcm_tags = [("TR", [("0x0018", "0x0080")]),
                         ("TE", [("0x0018", "0x0081")])]

    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui

        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed in the defined 'study_config'.
    """
    study_config.run(pipeline)

    """
    Access the result
    -----------------

    The 'nibabel' package is used to load the generated images. We display
    the numpy array shape and the stored repetition and echo times: in order
    to load the 'descrip' image field we use the 'json' package.
""" import json import copy import nibabel generated_images = pipeline.filled_converted_files for fnames in generated_images: print(">>>", fnames, "...") im = nibabel.load(fnames[0]) print("shape=", im.get_data().shape) header = im.get_header() a = str(header["descrip"]) a = a.strip() description = json.loads(copy.deepcopy(a)) print("TE=", description["TE"]) print("TR=", description["TR"]) print("Provided by=", description["Provided by"])
def pilot_preproc_spm_fmri(enable_display=False):
    """
    FMRI preprocessings
    ===================

    Preprocessing with the SPM slice timing and a normalization to a given
    template.

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.api import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists
    on the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/fmri_preproc_spm_fmri"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    Then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

    * fmri: the functional volume.
    * anat: the structural volume.
    * TR: the repetition time.

    Processing definition
    ---------------------

    First create the :ref:`preprocessing pipeline
    <clinfmri.preproc.FmriPreproc>` that defines the different steps of the
    processing:
    """
    pipeline = get_process_instance("clinfmri.preproc.converted_fmri_preproc")
    print(pipeline.get_input_spec())

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.fmri_file = toy_dataset.fmri
    pipeline.structural_file = toy_dataset.anat
    pipeline.realign_register_to_mean = True
    pipeline.select_slicer = "spm"
    pipeline.select_normalization = "fmri"
    pipeline.template_file = template_dataset.brain
    pipeline.force_repetition_time = toy_dataset.TR
    pipeline.force_slice_orders = [index + 1 for index in range(40)]

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
def pilot_bet(enable_display=False):
    """
    Brain Extraction Tool
    =====================
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.api import get_process_instance

    working_dir = "/volatile/nsap/catalogue/pclinfmri/fmri_bet"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    Then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=False,
        spm_directory="/i2bm/local/spm8",
        use_spm=False,
        output_directory=working_dir)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here the MNI template):
    """
    template_dataset = get_sample_data("mni_1mm")

    """
    Processing definition
    ---------------------
    """
    pipeline = get_process_instance("clinfmri.utils.converted_fsl_bet")
    print(pipeline.get_input_spec())

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.input_image_file = template_dataset.brain
    pipeline.generate_binary_mask = True

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
""" import json import copy import nibabel generated_images = pipeline.filled_converted_files for fnames in generated_images: print(">>>", fnames, "...") im = nibabel.load(fnames[0]) print("shape=", im.get_data().shape) header = im.get_header() a = str(header["descrip"]) a = a.strip() description = json.loads(copy.deepcopy(a)) print("TE=", description["TE"]) print("TR=", description["TR"]) print("Provided by=", description["Provided by"])
app = QtGui.QApplication.instance()
if not app:
    app = QtGui.QApplication(sys.argv)

#pipeline = Pipeline1()
#pipeline.main_inputs = ['/dir/file%d' % i for i in range(4)]
#pipeline.left_out = pipeline.main_inputs[2]
#pipeline.subject = 'subject2'
#pipeline.output_directory = '/dir/out_dir'
#view1 = PipelineDevelopperView(pipeline, allow_open_controller=True,
#                               show_sub_pipelines=True,
#                               enable_edition=True)
#view1.show()

pipeline2 = PipelineLOO()
pipeline2.main_inputs = ['/dir/file%d' % i for i in range(4)]
pipeline2.left_out = pipeline2.main_inputs[2]
pipeline2.subjects = ['subject%d' % i for i in range(4)]
pipeline2.output_directory = '/dir/out_dir'
wf = pipeline_workflow.workflow_from_pipeline(pipeline2,
                                              create_directories=False)

view2 = PipelineDevelopperView(pipeline2, allow_open_controller=True,
                               show_sub_pipelines=True,
                               enable_edition=True)
view2.show()
app.exec_()
#del view1
del view2
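# The workflow built above by workflow_from_pipeline() would typically be
# run through soma-workflow rather than only displayed. A minimal sketch,
# assuming a default local soma-workflow configuration (not part of the
# original script):
from soma_workflow.client import WorkflowController

controller = WorkflowController()
# Submit the soma-workflow Workflow generated from pipeline2.
wf_id = controller.submit_workflow(workflow=wf, name="pipeline2 run")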
Processing definition: create the <clinfmri.preproc.FmriPreproc> pipeline
that defines the different steps of the processing.
"""
pipeline = get_process_instance("clinfmri.preproc.fmri_preproc.xml")

"""
It is possible to display the pipeline.
"""
if args.display:
    import sys
    from PySide import QtGui
    from capsul.qt_gui.widgets import PipelineDevelopperView

    app = QtGui.QApplication(sys.argv)
    view = PipelineDevelopperView(pipeline)
    view.show()
    app.exec_()

"""
Now parametrize the pipeline:
"""
pipeline.fmri_file = funcfile
pipeline.realign_register_to_mean = True
pipeline.select_slicer = args.timings
pipeline.select_normalization = args.normalization
pipeline.force_repetition_time = args.repetition_time
pipeline.force_slice_orders = args.slice_order
pipeline.realign_wrap = [0, 1, 0]
pipeline.realign_write_wrap = [0, 1, 0]
pipeline.ref_slice = args.ref_slice
if args.template is not None:
    pipeline.template_file = args.template
        except OSError:
            pass
        try:
            os.unlink(output_name)
        except OSError:
            pass


def test():
    """ Function to execute unittest
    """
    suite = unittest.TestLoader().loadTestsFromTestCase(TestPipelineWithTemp)
    runtime = unittest.TextTestRunner(verbosity=2).run(suite)
    return runtime.wasSuccessful()


if __name__ == "__main__":
    print("RETURNCODE: ", test())

    if 1:
        import sys
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        pipeline = MyPipeline()
        pipeline.input_image = '/data/file.txt'
        pipeline.output_image = '/data/output_file.txt'
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1
def test():
    """ Function to execute unittest
    """
    suite = unittest.TestLoader().loadTestsFromTestCase(TestComplexPipeline)
    runtime = unittest.TextTestRunner(verbosity=2).run(suite)
    return runtime.wasSuccessful()


if __name__ == '__main__':
    print('Test return code:', test())

    if False:
        from pprint import pprint
        pipeline = get_process_instance(ComplexPipeline)

        import sys
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView
        #from capsul.qt_gui.widgets.activation_inspector \
        #    import ActivationInspectorApp
        #app = ActivationInspectorApp(ComplexPipeline)

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline, allow_open_controller=True,
                                      show_sub_pipelines=True)
        view.show()
        app.exec_()
        del view