Example #1
    def setUp(self):
        self.pipeline = DummyPipeline()

        tmpout = tempfile.mkstemp('.txt', prefix='capsul_test_')
        os.close(tmpout[0])
        os.unlink(tmpout[1])

        # use a custom temporary soma-workflow dir to avoid concurrent
        # access problems
        tmpdb = tempfile.mkstemp('', prefix='soma_workflow')
        os.close(tmpdb[0])
        os.unlink(tmpdb[1])
        self.soma_workflow_temp_dir = tmpdb[1]
        os.mkdir(self.soma_workflow_temp_dir)
        swf_conf = '[%s]\nSOMA_WORKFLOW_DIR = %s\n' \
            % (socket.gethostname(), tmpdb[1])
        swconfig.Configuration.search_config_path \
            = staticmethod(lambda : StringIO.StringIO(swf_conf))

        self.output = tmpout[1]
        self.pipeline.input = '/tmp/file_in.nii'
        self.pipeline.output = self.output
        study_config = StudyConfig(modules=['SomaWorkflowConfig'])
        study_config.input_directory = '/tmp'
        study_config.somaworkflow_computing_resource = 'localhost'
        study_config.somaworkflow_computing_resources_config.localhost = {
            'transfer_paths': [],
        }
        self.study_config = study_config
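A matching tearDown is not part of this excerpt; a minimal sketch, assuming only the attributes created in setUp above, could clean up the temporary paths:

    def tearDown(self):
        # Sketch: remove the temporary output file and the custom
        # soma-workflow directory created in setUp.
        import shutil
        if os.path.exists(self.output):
            os.unlink(self.output)
        shutil.rmtree(self.soma_workflow_temp_dir, ignore_errors=True)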
Example #2
 def test_study_config_fsl(self):
     if not sys.platform.startswith('win'):
         try:
             study_config = StudyConfig(use_fsl=True)
         except EnvironmentError as e:
             # If FSL cannot be configured automatically, skip the test
             print(
                 'WARNING: Skip FSL test because it cannot be configured automatically:',
                 str(e),
                 file=sys.stderr)
             return
         test_image = '/usr/share/data/fsl-mni152-templates/MNI152_T1_1mm_brain.nii.gz'
         if not osp.exists(test_image):
             fsl_dir = os.environ.get('FSLDIR')
             test_image = None
             if not fsl_dir and study_config.fsl_config is not Undefined:
                 fsl_dir = osp.dirname(
                     osp.dirname(osp.dirname(study_config.fsl_config)))
             if fsl_dir:
                 test_image = glob(
                     osp.join(
                         fsl_dir,
                         'fslpython/envs/fslpython/lib/python*/site-packages/nibabel/tests/data/anatomical.nii'
                     ))
                 if test_image:
                     test_image = test_image[0]
             if not test_image:
                 print(
                     'WARNING: Skip FSL test because test data cannot be found',
                     file=sys.stderr)
                 return
         bet = study_config.get_process_instance(Bet)
         with tempfile.NamedTemporaryFile(suffix='.nii.gz') as tmp:
             bet.run(input_image=test_image, output_image=tmp.name)
             self.assertTrue(os.stat(tmp.name).st_size != 0)
Example #3
 def setUp(self):
     study_config = StudyConfig() #modules=StudyConfig.default_modules \
                                #+ ['FomConfig'])
     self.pipeline = DummyPipeline()
     self.pipeline.set_study_config(study_config)
     self.tmpdir = tempfile.mkdtemp()
     self.pipeline.input = osp.join(self.tmpdir, 'file_in.nii')
     self.pipeline.output1 = osp.join(self.tmpdir, 'file_out1.nii')
     self.pipeline.output2 = osp.join(self.tmpdir, 'file_out2.nii')
     self.pipeline.output3 = osp.join(self.tmpdir, 'file_out3.nii')
     study_config.input_directory = self.tmpdir
     study_config.somaworkflow_computing_resource = 'localhost'
     study_config.somaworkflow_computing_resources_config.localhost = {
         'transfer_paths': [study_config.input_directory],
     }
     self.study_config = study_config
     engine = self.study_config.engine
     engine.load_module('spm')
     #with engine.settings as session:
         #ids = [c.config_id for c in session.configs('spm', 'global')]
         #for id in ids:
             #session.remove_config('spm', 'global', {'config_id': id})
         #session.new_config('spm', 'global',
                            #{'version': '12', 'standalone': True})
     study_config.spm_standalone = True
     study_config.spm_version = '12'
     study_config.somaworkflow_keep_succeeded_workflows = False
     self.exec_ids = []
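Note that, unlike the setUp of Example #11 below, the input file is only named here, not created; if the tests need it on disk, a couple of lines mirroring that example could be appended to setUp:

     with open(self.pipeline.input, 'w') as f:
         print('Initial file content.', file=f)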
Example #4
def pilot_gdti_estimation():
    """
    Generalized diffusion tensor estimation
    =======================================
    """
    # System import
    import os
    import sys
    import datetime
    import PySide.QtGui as QtGui

    # CAPSUL import
    from capsul.qt_gui.widgets import PipelineDevelopperView
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance
    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/clindmri/gdti"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)
    """
    And then define the study configuration (here we activate the smart
    caching module, which remembers which processes have already been
    executed):
    """
    study_config = StudyConfig(modules=["SmartCachingConfig"],
                               use_smart_caching=True,
                               output_directory=working_dir)

    # Create pipeline
    start_time = datetime.datetime.now()
    print "Start Pipeline Creation", start_time
    pipeline = get_process_instance("clindmri.estimation.gdti.xml")
    print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)

    # View pipeline
    if 0:
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1

    # Set pipeline input parameters
    pipeline.dfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.nii.gz"
    pipeline.bvalfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bval"
    pipeline.bvecfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bvec"
    pipeline.order = 2
    pipeline.odf = False
    print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)

    # Execute the pipeline in the configured study
    study_config.run(pipeline, verbose=1)
Example #5
    def test_execution_without_cache(self):
        """ Execute a process without cache.
        """
        # Create a study configuration
        self.output_directory = tempfile.mkdtemp()
        self.study_config = StudyConfig(modules=["SmartCachingConfig"],
                                        use_smart_caching=False,
                                        output_directory=self.output_directory)

        # Call the test
        self.execution_dummy()

        # Rm temporary folder
        shutil.rmtree(self.output_directory)
Example #6
 def setUp(self):
     self.pipeline = DummyPipeline()
     self.pipeline.input = '/tmp/file_in.nii'
     self.pipeline.output1 = '/tmp/file_out1.nii'
     self.pipeline.output2 = '/tmp/file_out2.nii'
     self.pipeline.output3 = '/tmp/file_out3.nii'
     study_config = StudyConfig()  #modules=StudyConfig.default_modules \
     #+ ['FomConfig'])
     study_config.input_directory = '/tmp'
     study_config.somaworkflow_computing_resource = 'localhost'
     study_config.somaworkflow_computing_resources_config.localhost = {
         'transfer_paths': [study_config.input_directory],
     }
     self.study_config = study_config
Example #7
 def test_study_config_fs(self):
     freesurfer_config = "/i2bm/local/freesurfer/SetUpFreeSurfer.sh"
     if not os.path.exists(freesurfer_config) \
             or not sys.platform.startswith('linux'):
         # skip this test if FS is not available, or not running
         # on linux (other systems may see this directory but cannot use it)
         return
     study_config = StudyConfig(modules=['FreeSurferConfig'],
                                freesurfer_config = freesurfer_config)
     study_config.use_fs = True
     for varname in ["FREESURFER_HOME", "FSF_OUTPUT_FORMAT", "MNI_DIR",
                     "FSFAST_HOME", "FMRI_ANALYSIS_DIR", "FUNCTIONALS_DIR",
                     "MINC_BIN_DIR", "MNI_DATAPATH"]:
         self.assertTrue(os.environ.get(varname) is not None,
                         msg='%s environment variable not set' % varname)
Example #8
 def test_study_config_fsl(self):
     if not sys.platform.startswith('win'):
         fsl_h = "/etc/fsl/4.1/fsl.sh"
         
         if os.path.exists(fsl_h):
             study_config = StudyConfig(modules=['FSLConfig'],
                 fsl_config = fsl_h)
             if not study_config.use_fsl:
                 return # skip this test if FSL is not available
             for varname in ["FSLDIR", "FSLOUTPUTTYPE", "FSLTCLSH", 
                             "FSLWISH", "FSLREMOTECALL", "FSLLOCKDIR", 
                             "FSLMACHINELIST", "FSLBROWSER"]:
                 self.assertTrue(os.environ.get(varname) is not None, 
                                 msg='%s environment variable not set' 
                                     % varname)
Example #9
 def run_study_config_instanciation(self, tests, test_description,
                                    user_config_directory):
     for arguments, results in tests:
         args, kwargs = arguments
         sargs = ', '.join(repr(i) for i in args)
          if kwargs:
              if sargs:
                  sargs += ', '
              sargs += ', '.join('%s=%s' % (i, repr(j))
                                 for i, j in six.iteritems(kwargs))
         sc = StudyConfig(*args, **kwargs)
         (expected_config, expected_modules, global_config_file,
          study_config_file) = results
         if global_config_file:
             global_config_file = os.path.join(user_config_directory,
                                               global_config_file)
         if study_config_file:
             study_config_file = os.path.join(user_config_directory,
                                              study_config_file)
         config = sc.get_configuration_dict()
         modules = sorted(sc.modules.keys())
         try:
             self.assertEqual(set(config), set(expected_config))
             for name, value in six.iteritems(expected_config):
                 self.assertEqual(
                     config[name], value,
                     'StudyConfig(%s) %s attribute %s should be %s but is '
                     '%s' % (sargs, test_description, name, repr(value),
                             repr(getattr(sc, name))))
             self.assertEqual(
                 modules, expected_modules,
                 'StudyConfig(%s) %s modules are %s but expected value is '
                 '%s' % (sargs, test_description, repr(modules),
                         repr(expected_modules)))
             self.assertEqual(
                 sc.global_config_file, global_config_file,
                 'StudyConfig(%s) %s global_config_file should be %s but '
                 'is %s' %
                 (sargs, test_description, repr(global_config_file),
                  repr(sc.global_config_file)))
             self.assertEqual(
                 sc.study_config_file, study_config_file,
                 'StudyConfig(%s) %s study_config_file should be %s but is '
                 '%s' % (sargs, test_description, repr(study_config_file),
                         repr(sc.study_config_file)))
         except Exception as e:
             raise EnvironmentError(
                 'When testing StudyConfig(*{0}, **{1}), got the following error: {2}'
                 .format(args, kwargs, e))
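Each 'tests' entry must match the unpacking above: a pair (arguments, results), where arguments is (args, kwargs) for the StudyConfig call and results is (expected_config, expected_modules, global_config_file, study_config_file). A purely illustrative entry (the expected values are assumptions, not real results) could look like:

tests = [
    # ((positional args, keyword args), expected results)
    (((), {'modules': ['SomaWorkflowConfig']}),
     ({'use_soma_workflow': False},   # expected_config (dict of attributes)
      ['SomaWorkflowConfig'],         # expected_modules (sorted list)
      'config.json',                  # global_config_file (relative to user_config_directory)
      None)),                         # study_config_file
]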
Example #10
    def setUp(self):
        self.pipeline = DummyPipeline()

        tmpout = tempfile.mkstemp('.txt', prefix='capsul_test_')
        os.close(tmpout[0])
        os.unlink(tmpout[1])

        self.output = tmpout[1]
        self.pipeline.input = '/tmp/file_in.nii'
        self.pipeline.output = self.output
        study_config = StudyConfig(modules=['SomaWorkflowConfig'])
        study_config.input_directory = '/tmp'
        study_config.somaworkflow_computing_resource = 'localhost'
        study_config.somaworkflow_computing_resources_config.localhost = {
            'transfer_paths': [],
        }
        self.study_config = study_config
Example #11
    def setUp(self):
        self.pipeline = DummyPipeline()

        tmpdir = tempfile.mkdtemp('capsul_output_test')
        tmpout = os.path.join(tmpdir, 'capsul_test_node3_out.txt')

        self.tmpdir = tmpdir
        self.pipeline.input = os.path.join(tmpdir, 'file_in.nii')
        with open(self.pipeline.input, 'w') as f:
            print('Initial file content.', file=f)
        self.pipeline.output = tmpout
        study_config = StudyConfig(modules=['SomaWorkflowConfig'])
        study_config.input_directory = tmpdir
        study_config.somaworkflow_computing_resource = 'localhost'
        study_config.somaworkflow_computing_resources_config.localhost = {
            'transfer_paths': [],
        }
        self.study_config = study_config
Example #12
    def __init__(self, 
                 database_location,
                 database,
                 config=None):
        '''
        The CapsulEngine constructor should not be called directly.
        Use the capsul_engine() factory function instead.
        '''
        super(CapsulEngine, self).__init__()
        
        self._database_location = database_location
        self._database = database

        db_config = database.json_value('config')

        self._loaded_modules = {}
        self.modules = database.json_value('modules')
        if self.modules is None:
            self.modules = self.default_modules
        self.load_modules()
        
        execution_context = from_json(database.json_value('execution_context'))
        if execution_context is None:
            execution_context = ExecutionContext()
        self._execution_context = execution_context
            
        self._processing_engine = from_json(database.json_value('processing_engine'))        
        self._metadata_engine = from_json(database.json_value('metadata_engine'))
        
        for cfg in (db_config, config):
            if cfg:
                for n, v in cfg.items():
                    if isinstance(v, dict):
                        o = getattr(self, n)
                        if isinstance(o, Controller):
                            o.import_from_dict(v)
                            continue
                    setattr(self, n, v)

        self.init_modules()

        self.study_config = StudyConfig(engine=self)
Example #13
    def test_study_config_fom(self):
        initial_config = {
            "input_directory": "/blop/basetests",
            "output_directory": "/blop/basetests",
            "input_fom": "",
            "output_fom": "",
            "shared_fom": "",
            "spm_directory": "/i2bm/local/spm8-standalone",
            "use_soma_workflow": False,
            "use_fom": True,
        }

        #soma_app = Application('soma.fom', '1.0')
        #soma_app.plugin_modules.append('soma.fom')
        #soma_app.initialize()
        study_config = StudyConfig(init_config=initial_config,
                                   modules=StudyConfig.default_modules +
                                   ['BrainVISAConfig', 'FomConfig'])
        self.assertTrue(hasattr(study_config.modules_data, 'foms'))
        self.assertTrue(hasattr(study_config.modules_data, 'fom_atp'))
        self.assertTrue(hasattr(study_config.modules_data, 'fom_pta'))
Example #14
    def __init__(self, database_location, database, require):
        '''
        CapsulEngine.__init__(self, database_location, database, require)

        The CapsulEngine constructor should not be called directly.
        Use the :func:`capsul_engine` factory function instead.
        '''
        super(CapsulEngine, self).__init__()

        self._settings = None

        self._database_location = database_location
        self._database = database

        self._loaded_modules = set()
        self.load_modules(require)

        from capsul.study_config.study_config import StudyConfig
        self.study_config = StudyConfig(engine=self)

        self._metadata_engine = from_json(
            database.json_value('metadata_engine'))

        self._connected_resource = ''
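Both constructors point to the capsul_engine() factory instead of direct instantiation; a minimal usage sketch (assuming the factory is importable from capsul.engine, as in the capsul sources these snippets come from) would be:

from capsul.engine import capsul_engine

# Sketch: the factory sets up the database and calls the constructor itself.
engine = capsul_engine()             # default database location
study_config = engine.study_config   # the StudyConfig bound to the engine in __init__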
Example #15
def pilot_fsl_preproc():
    """
    FSL preprocessings
    ==================
    """
    # System import
    import os
    import sys
    import datetime
    import PySide.QtGui as QtGui

    # CAPSUL import
    from capsul.qt_gui.widgets import PipelineDevelopperView
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/clindmri/fslpreproc"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration (here we activate the smart
    caching module, which remembers which processes have already been
    executed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "FSLConfig", "MatlabConfig",
                 "SPMConfig", "NipypeConfig"],
        use_smart_caching=True,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,        
        output_directory=working_dir)

    # Create pipeline
    start_time = datetime.datetime.now()
    print "Start Pipeline Creation", start_time
    pipeline = get_process_instance("clindmri.preproc.fsl_preproc.xml")
    print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)

    # View pipeline
    if 0:
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1

    # Set pipeline input parameters
    pipeline.dfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.nii.gz"
    pipeline.bvalfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bval"
    pipeline.bvecfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bvec"
    print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)

    #print pipeline.nodes["eddy"].process._nipype_interface.inputs
    print(pipeline.nodes["eddy"].process._nipype_interface.cmdline)

    # Execute the pipeline in the configured study
    study_config.run(pipeline, verbose=1)
Example #16
from capsul.study_config.study_config import StudyConfig

# CAPS import
from caps.nsap.functional_statistic.pipeline import SpmFirstLevelPipeline
from capsul.process.loader import get_process_instance
from caps.toy_datasets import get_sample_data


# Configure the environment
start_time = datetime.datetime.now()
print "Start Configuration", start_time
study_config = StudyConfig(
    modules=["MatlabConfig", "SPMConfig", "NipypeConfig", "FSLConfig",
             "FreeSurferConfig", "SmartCachingConfig"],
    matlab_exec="/neurospin/local/bin/matlab",
    spm_directory="/i2bm/local/spm8-6313",
    use_matlab=True,
    use_spm=True,
    use_nipype=True,
    use_smart_caching=True,
    output_directory="/volatile/nsap/catalogue/spm_first_level/")
print "Done in {0} seconds".format(datetime.datetime.now() - start_time)


# Create pipeline
start_time = datetime.datetime.now()
print "Start Pipeline Creation", start_time
pipeline = get_process_instance(
    "caps.nsap.functional_statistic.pipeline.spm_first_level_pipeline.xml")
print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)

Example #17
def pilot_qa_fmri():
    """
    Imports
    -------

    This code needs the 'capsul' and 'mmutils' packages in order to
    instantiate and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or on PyPI.
    """
    # Capsul import
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    # Mmutils import
    from mmutils.toy_datasets import get_sample_data
    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the fMRI quality assurance, and 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """

    pipeline_name = "mmqa.fmri.fmri_quality_assurance_bbox.xml"
    outdir = tempfile.mkdtemp()
    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attribute, and to specify that we want a log of the
    processing steps through the 'generate_logging' attribute. The
    'use_scheduler' attribute must be set to True if more than one CPU is used.
    """
    study_config = StudyConfig(number_of_cpus=1,
                               generate_logging=True,
                               use_scheduler=True,
                               output_directory=outdir)
    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a functional image that is downloaded,
    if necessary, through the 'get_sample_data' function and exported
    locally.
    """

    localizer_dataset = get_sample_data("localizer_extra")
    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we set the functional image, the repetition
    time, the ROI size and the output score file.
    """

    pipeline = get_process_instance(pipeline_name)
    pipeline.image_file = localizer_dataset.fmri
    pipeline.repetition_time = 2.0
    pipeline.exclude_volume = []
    pipeline.roi_size = 21
    pipeline.score_file = os.path.join(outdir, "scores.json")
    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed with the defined 'study_config'.
    """
    study_config.run(pipeline)
    """
    Access the result
    -----------------

    Display the computed scores
    """

    scores_file = pipeline.scores_file

    with open(scores_file, "r") as _file:
        scores = json.load(_file)

    for key, value in scores.items():
        print("{0} = {1}".format(key, value))
Example #18
def morphologist_all(t1file, sid, outdir, study="morphologist", waittime=10,
                     somaworkflow=False,
                     spmexec="/i2bm/local/spm8-standalone/run_spm8.sh",
                     spmdir="/i2bm/local/spm8-standalone"):
    """ Performs all the Morphologist steps.

    Steps:

    1- Ensure image orientation and reorient it if needed (Prepare Subject for
       Anatomical Pipeline).
    2- Computation of a brain mask (Brain Mask Segmentation).
    3- Computation of a mask for each hemisphere (Split Brain Mask).
    4- A grey/white classification of each hemisphere to perform "Voxel Based
       Morphometry" (Grey White Classification) and spherical triangulation of
       cortical hemispheres (Grey White Surface).
    5- Spherical triangulation of the external interface of the cortex of one
       or two hemispheres (Get Spherical Hemi Surface).
    6- Computation of a graph representing the cortical fold topography
       (Cortical Fold Graph).
    7- Automatic identification of the cortical sulci (Automatic Sulci
       Recognition), located in the "sulci" toolbox.

    The execution is performed with soma_workflow, which has to be installed
    in the bv_env environment.

    To check the workflow submission, use the 'soma_workflow_gui' command.

    If the input 't1file' does not have the expected extension, an Exception
    will be raised.
    If $outdir/$study/$sid has already been created, an Exception will be
    raised.

    Parameters
    ----------
    t1file: str (mandatory)
        the path to a ".nii.gz" anatomical T1 weighted file.
    sid: str (mandatory)
        a subject identifier.
    outdir: str (mandatory)
        the morphologist output files will be written in $outdir/$study/$sid.
    study: str (mandatory)
        the name of the study.
    waittime: float (optional, default 10)
        a delay (in seconds) used to check the workflow status.
    somaworkflow: bool (optional, default False)
        if True use somaworkflow for the execution.
    spmexec: str (optional)
        the path to the standalone SPM execution file.
    spmdir: str (optional)
        the standalone SPM directory.

    Returns
    -------
    wffile: str
        a file containing the submitted workflow.
    wfid: int
        the submitted workflow identifier.
    wfstatus: str
        the submitted workflow status after 'waittime' seconds.
    """
    # Check roughly the input file extension
    if not t1file.endswith(".nii.gz"):
        raise Exception("'{0}' is not a COMPRESSED NIFTI file.".format(t1file))

    # Create a configuration for the morphologist study
    study_config = StudyConfig(
        modules=StudyConfig.default_modules + ["FomConfig", "BrainVISAConfig"])
    study_dict = {
        "name": "morphologist_fom",
        "input_directory": outdir,
        "output_directory": outdir,
        "input_fom": "morphologist-auto-nonoverlap-1.0",
        "output_fom": "morphologist-auto-nonoverlap-1.0",
        "shared_fom": "shared-brainvisa-1.0",
        "spm_directory": spmdir,
        "use_soma_workflow": True,
        "use_fom": True,
        "spm_standalone": True,
        "use_matlab": False,
        "volumes_format": "NIFTI gz",
        "meshes_format": "GIFTI",
        "use_spm": True,
        "spm_exec": spmexec,
        "study_config.somaworkflow_computing_resource": "localhost",
        "somaworkflow_computing_resources_config": {
            "localhost": {
            }
        }
    }
    study_config.set_study_configuration(study_dict)

    # Create the morphologist pipeline
    pipeline = get_process_instance(
        "morphologist.capsul.morphologist.Morphologist")
    morphologist_pipeline = process_with_fom.ProcessWithFom(
        pipeline, study_config)
    morphologist_pipeline.attributes = dict(
        (trait_name, getattr(morphologist_pipeline, trait_name))
        for trait_name in morphologist_pipeline.user_traits())
    morphologist_pipeline.attributes["center"] = "morphologist"
    morphologist_pipeline.attributes["subject"] = sid
    morphologist_pipeline.create_completion()

    # Create morphologist expected tree
    # ToDo: use ImportT1 from axon
    subjectdir = os.path.join(outdir, study, sid)
    if os.path.isdir(subjectdir):
        raise Exception("Folder '{0}' already created.".format(subjectdir))
    os.makedirs(os.path.join(
        subjectdir, "t1mri", "default_acquisition",
        "default_analysis", "folds", "3.1", "default_session_auto"))
    os.makedirs(os.path.join(
        subjectdir, "t1mri", "default_acquisition",
        "registration"))
    os.makedirs(os.path.join(
        subjectdir, "t1mri", "default_acquisition",
        "segmentation", "mesh"))
    os.makedirs(os.path.join(
        subjectdir, "t1mri", "default_acquisition",
        "tmp"))

    # Copy T1 file in the morphologist expected location
    destfile = os.path.join(subjectdir, "t1mri",
                            "default_acquisition", sid + ".nii.gz")
    shutil.copy(t1file, destfile)

    # Create source_referential morphologist expected file
    source_referential = {"uuid": str(soma.uuid.Uuid())}
    referential_file = os.path.join(
        subjectdir, "t1mri", "default_acquisition", "registration",
        "RawT1-{0}_default_acquisition.referential".format(sid))
    attributes = "attributes = {0}".format(json.dumps(source_referential))
    with open(referential_file, "w") as openfile:
        openfile.write(attributes)

    # Create a workflow from the morphologist pipeline
    workflow = Workflow(name="{0} {1}".format(study, sid),
                        jobs=[])
    workflow.root_group = []

    # Create the workflow
    wf = pipeline_workflow.workflow_from_pipeline(
        morphologist_pipeline.process, study_config=study_config)
    workflow.add_workflow(wf, as_group="{0}_{1}".format(study, sid))
    wffile = os.path.join(subjectdir, "{0}.wf".format(study))
    pickle.dump(workflow, open(wffile, "w"))

    # Execute the workflow with somaworkflow
    if somaworkflow:
        controller = WorkflowController()
        wfid = controller.submit_workflow(
            workflow=workflow, name="{0}_{1}".format(study, sid))

        # Return the workflow status after execution
        while True:
            time.sleep(waittime)
            wfstatus = controller.workflow_status(wfid)
            if wfstatus not in [
                    "worklflow_not_started", "workflow_in_progress"]:
                break

    # Execute the workflow with subprocess
    else:
        # -> construct the ordered list of commands to be executed
        workflow_repr = workflow.to_dict()
        graph = Graph()
        for job in workflow_repr["jobs"]:
            graph.add_node(GraphNode(job, None))
        for link in workflow_repr["dependencies"]:
            graph.add_link(link[0], link[1])
        ordered_nodes = [str(node[0]) for node in graph.topological_sort()]
        commands = []
        jobs = workflow_repr["serialized_jobs"]
        temporaries = workflow_repr["serialized_temporary_paths"]
        barriers = workflow_repr["serialized_barriers"]
        for index in ordered_nodes:
            if index in jobs:
                commands.append(jobs[index]["command"])
            elif index in barriers:
                continue
            else:
                raise Exception("Unexpected node in workflow.")

        # -> Go through all commands
        tmpmap = {}
        for cmd in commands:
            # -> deal with temporary files
            for index, item in enumerate(cmd):
                if not isinstance(item, basestring):
                    if str(item) not in tmpmap:
                        if str(item) in temporaries:
                            struct = temporaries[str(item)]
                            name = cmd[2].split(";")[1].split()[-1]
                            tmppath = os.path.join(
                                subjectdir, "t1mri", "default_acquisition",
                                "tmp", str(item) + name + struct["suffix"])
                            tmpmap[str(item)] = tmppath
                        else:
                            raise MorphologistError(
                                "Can't complete command '{0}'.".format(
                                    cmd))
                    cmd[index] = tmpmap[str(item)]

            # -> execute the command
            worker = MorphologistWrapper(cmd)
            worker()
            if worker.exitcode != 0:
                raise MorphologistRuntimeError(
                    " ".join(worker.cmd), worker.stderr)

        wfstatus = "Done"
        wfid = "subprocess"

    return wffile, wfid, wfstatus
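A call to this function might look as follows; the paths and subject identifier are purely illustrative:

wffile, wfid, wfstatus = morphologist_all(
    "/data/subject01_t1.nii.gz",    # hypothetical compressed NIfTI T1 image
    "subject01",                    # subject identifier
    "/data/morphologist_out",       # results land in $outdir/$study/$sid
    study="morphologist",
    somaworkflow=False)             # run through subprocesses instead of soma-workflow
print(wffile, wfid, wfstatus)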
Example #19
logging.basicConfig(level=logging.INFO)

# CAPSUL import
from capsul.qt_gui.widgets import PipelineDevelopperView
from capsul.study_config.study_config import StudyConfig
from capsul.process.loader import get_process_instance

# CAPS import
from caps.toy_datasets import get_sample_data


# Configure the environment
start_time = datetime.datetime.now()
print "Start Configuration", start_time
study_config = StudyConfig(
    modules=["SmartCachingConfig"],
    use_smart_caching=True,
    output_directory="/volatile/nsap/catalogue/quality_assurance/")
print "Done in {0} seconds".format(datetime.datetime.now() - start_time)


# Create pipeline
start_time = datetime.datetime.now()
print "Start Pipeline Creation", start_time
pipeline = get_process_instance("mmqa.fmri.fmri_quality_assurance.xml")
print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)


# Set pipeline input parameters
start_time = datetime.datetime.now()
print "Start Parametrization", start_time
localizer_dataset = get_sample_data("localizer")
Example #20
    def __init__(self, pipeline_menu, ui_file, default_study_config=None):
        """ Method to initialize the Capsul main window class.

        Parameters
        ----------
        pipeline_menu: hierarchical dict
            each key is a sub-module of the module. Leaves contain a list with
            the URL to the documentation.
        ui_file: str (mandatory)
            a filename containing the user interface description.
        default_study_config: ordered dict (optional)
            some parameters for the study configuration.
        """
        # Inheritance: load user interface window
        MyQUiLoader.__init__(self, ui_file)

        # Class parameters
        self.pipeline_menu = pipeline_menu
        self.pipelines = {}
        self.pipeline = None
        self.path_to_pipeline_doc = {}

        # Define dynamic controls
        self.controls = {
            QtGui.QAction: [
                "actionHelp", "actionQuit", "actionBrowse", "actionLoad",
                "actionChangeView", "actionParameters", "actionRun",
                "actionStudyConfig", "actionQualityControl"
            ],
            QtGui.QTabWidget: [
                "display",
            ],
            QtGui.QDockWidget: [
                "dockWidgetBrowse", "dockWidgetParameters",
                "dockWidgetStudyConfig", "dockWidgetBoard"
            ],
            QtGui.QWidget: [
                "dock_browse", "dock_parameters", "dock_study_config",
                "dock_board"
            ],
            QtGui.QTreeWidget: [
                "menu_treectrl",
            ],
            QtGui.QLineEdit: [
                "search",
            ],
        }

        # Add ui class parameter with the dynamic controls and initialize
        # default values
        self.add_controls_to_ui()
        self.ui.display.setTabsClosable(True)

        # Create the study configuration
        self.study_config = StudyConfig(default_study_config)

        # Create the controller widget associated to the study
        # configuration controller
        self.study_config_widget = ScrollControllerWidget(self.study_config,
                                                          live=True)
        self.ui.dockWidgetStudyConfig.setWidget(self.study_config_widget)

        # Create the pipeline menu
        fill_treectrl(self.ui.menu_treectrl, self.pipeline_menu)

        # Signal for window interface
        self.ui.actionHelp.triggered.connect(self.onHelpClicked)
        self.ui.actionChangeView.triggered.connect(self.onChangeViewClicked)

        # Signal for tab widget
        self.ui.display.currentChanged.connect(self.onCurrentTabChanged)
        self.ui.display.tabCloseRequested.connect(self.onCloseTabClicked)

        # Signal for dock widget
        self.ui.actionBrowse.triggered.connect(self.onBrowseClicked)
        self.ui.actionParameters.triggered.connect(self.onParametersClicked)
        self.ui.actionStudyConfig.triggered.connect(self.onStudyConfigClicked)
        self.ui.actionQualityControl.triggered.connect(
            self.onQualityControlClicked)

        # Initialize properly the visibility of each dock widget
        self.onBrowseClicked()
        self.onParametersClicked()
        self.onStudyConfigClicked()
        self.onQualityControlClicked()

        # Signal for the pipeline creation
        self.ui.search.textChanged.connect(self.onSearchClicked)
        self.ui.menu_treectrl.currentItemChanged.connect(
            self.onTreeSelectionChanged)
        self.ui.actionLoad.triggered.connect(self.onLoadClicked)

        # Signal for the execution
        self.ui.actionRun.triggered.connect(self.onRunClicked)
Example #21
def pilot_dcm2nii():
    """
    Imports
    -------

    This code needs the 'capsul' and 'mmutils' packages in order to
    instantiate and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or on PyPI.
    """
    import os
    import sys
    import shutil
    import tempfile
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance
    from mmutils.toy_datasets import get_sample_data
    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the DICOMs conversion, and the 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """
    pipeline_name = "dcmio.dcmconverter.dcm_to_nii.xml"
    outdir = tempfile.mkdtemp()
    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attribute, and to specify that we want a log of the
    processing steps through the 'generate_logging' attribute. The
    'use_scheduler' attribute must be set to True if more than one CPU is used.
    """
    study_config = StudyConfig(modules=[],
                               output_directory=outdir,
                               number_of_cpus=1,
                               generate_logging=True,
                               use_scheduler=True)
    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a 3D heart DICOM image that is downloaded,
    if necessary, through the 'get_sample_data' function and exported
    locally in a 'heart.dcm' file.
    """
    dicom_dataset = get_sample_data("dicom")
    dcmfolder = os.path.join(outdir, "dicom")
    if not os.path.isdir(dcmfolder):
        os.makedirs(dcmfolder)
    shutil.copy(dicom_dataset.barre, os.path.join(dcmfolder, "heart.dcm"))
    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we decided to set the date in the converted
    file name and we set two DICOM directories to be converted in Nifti
    format.
    """
    pipeline = get_process_instance(pipeline_name)
    pipeline.date_in_filename = True
    pipeline.dicom_directories = [dcmfolder, dcmfolder]
    pipeline.additional_informations = [[("Provided by", "Neurospin@2015")],
                                        [("Provided by", "Neurospin@2015"),
                                         ("TR", "1500")]]

    pipeline.dcm_tags = [("TR", [("0x0018", "0x0080")]),
                         ("TE", [("0x0018", "0x0081")])]
    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed with the defined 'study_config'.
    """
    study_config.run(pipeline)
    """
    Access the result
    -----------------

    The 'nibabel' package is used to load the generated images. We display the
    numpy array shape and the stored repetition and echo times: in order
    to load the 'descrip' image field we use the 'json' package.
    """
    import json
    import copy
    import nibabel

    generated_images = pipeline.filled_converted_files

    for fnames in generated_images:
        print(">>>", fnames, "...")
        im = nibabel.load(fnames[0])
        print("shape=", im.get_data().shape)
        header = im.get_header()
        a = str(header["descrip"])
        a = a.strip()
        description = json.loads(copy.deepcopy(a))
        print("TE=", description["TE"])
        print("TR=", description["TR"])
        print("Provided by=", description["Provided by"])