Code example #1
    def setUp(self):
        """ In the setup construct the pipeline and set some input parameters.
        """
        self.directory = tempfile.mkdtemp(prefix="capsul_test")

        self.study_config = StudyConfig()

        # Construct the pipeline
        self.pipeline = self.study_config.get_process_instance(MyPipeline)

        # Set some input parameters
        self.pipeline.input_image = [
            os.path.join(self.directory, "toto"),
            os.path.join(self.directory, "tutu")
        ]
        self.pipeline.dynamic_parameter = [3, 1]
        self.pipeline.other_input = 5

        # build a pipeline with dependencies
        self.small_pipeline \
            = self.study_config.get_process_instance(MySmallPipeline)
        self.small_pipeline.files_to_create = [
            os.path.join(self.directory, "toto"),
            os.path.join(self.directory, "tutu")
        ]
        self.small_pipeline.dynamic_parameter = [3, 1]
        self.small_pipeline.other_input = 5

        # build a bigger pipeline with several levels
        self.big_pipeline \
            = self.study_config.get_process_instance(MyBigPipeline)
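
The pipeline classes referenced above (MyPipeline, MySmallPipeline, MyBigPipeline) are defined elsewhere in Capsul's test modules and are not part of this excerpt. As a rough, hypothetical sketch only (class name, traits and behaviour are assumptions, not the real test definitions), a process that such test pipelines could be built from might look like this:

from capsul.api import Process
from traits.api import File, Float

class DummyProcessSketch(Process):
    """Hypothetical minimal process, for orientation only."""
    def __init__(self):
        super(DummyProcessSketch, self).__init__()
        # declare parameters as traits, as Capsul processes usually do
        self.add_trait("input_image", File(optional=False))
        self.add_trait("dynamic_parameter", Float(optional=False))
        self.add_trait("other_input", Float(optional=False))
        self.add_trait("output_image", File(output=True, optional=False))

    def _run_process(self):
        # placeholder behaviour: copy the input file to the output location
        with open(self.input_image) as fin, \
                open(self.output_image, "w") as fout:
            fout.write(fin.read())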
Code example #2
class TestSomaWorkflow(unittest.TestCase):
    def setUp(self):
        default_config = SortedDictionary(("use_soma_workflow", True))
        # use a custom temporary soma-workflow dir to avoid concurrent
        # access problems
        tmpdb = tempfile.mkstemp('', prefix='soma_workflow')
        os.close(tmpdb[0])
        os.unlink(tmpdb[1])
        self.soma_workflow_temp_dir = tmpdb[1]
        os.mkdir(self.soma_workflow_temp_dir)
        swf_conf = StringIO.StringIO('[%s]\nSOMA_WORKFLOW_DIR = %s\n' \
            % (socket.gethostname(), tmpdb[1]))
        swconfig.Configuration.search_config_path \
            = staticmethod(lambda : swf_conf)
        self.study_config = StudyConfig(init_config=default_config)
        self.atomic_pipeline = MyAtomicPipeline()
        self.composite_pipeline = MyCompositePipeline()

    def tearDown(self):
        shutil.rmtree(self.soma_workflow_temp_dir)

    def test_atomic_dependencies(self):
        workflow = workflow_from_pipeline(self.atomic_pipeline)
        dependencies = [(x.name, y.name) for x, y in workflow.dependencies]
        self.assertTrue(len(dependencies) == 4)
        self.assertTrue(("node1", "node2") in dependencies)
        self.assertTrue(("node1", "node3") in dependencies)
        self.assertTrue(("node2", "node4") in dependencies)
        self.assertTrue(("node3", "node4") in dependencies)
        self.assertEqual(workflow.groups, [])

    def test_atomic_execution(self):
        self.atomic_pipeline.workflow_ordered_nodes()
        if sys.version_info >= (2, 7):
            self.assertIn(
                self.atomic_pipeline.workflow_repr,
                ('node1->node3->node2->node4', 'node1->node2->node3->node4'))
        else:  # python 2.6 unittest does not have assertIn()
            self.assertTrue(self.atomic_pipeline.workflow_repr in \
                ('node1->node3->node2->node4',
                'node1->node2->node3->node4'))
        self.study_config.run(self.atomic_pipeline)

    def test_composite_dependencies(self):
        workflow = workflow_from_pipeline(self.composite_pipeline)
        dependencies = [(x.name, y.name) for x, y in workflow.dependencies]
        self.assertTrue(len(dependencies) == 16)
        self.assertEqual(dependencies.count(("node1", "node2")), 1)
        self.assertEqual(dependencies.count(("node1", "node3")), 2)
        self.assertEqual(dependencies.count(("node2", "node4")), 1)
        self.assertEqual(dependencies.count(("node3", "node4")), 2)
        self.assertEqual(dependencies.count(("node1", "node2_input")), 1)
        self.assertEqual(dependencies.count(("node2_output", "node4")), 1)
        self.assertTrue(len(workflow.groups) == 1)

    def test_composite_execution(self):
        self.composite_pipeline.workflow_ordered_nodes()
        self.assertTrue(self.composite_pipeline.workflow_repr in (
            "node1->node3->node2->node4", "node1->node2->node3->node4"))
        self.study_config.run(self.composite_pipeline)
Code example #3
 def setUp(self):
     default_config = SortedDictionary(
         ("use_soma_workflow", True)
     )
     self.study_config = StudyConfig(init_config=default_config)
     self.atomic_pipeline = MyAtomicPipeline()
     self.composite_pipeline = MyCompositePipeline()
Code example #4
 def setUp(self):
     """ Initialize the TestQCNodes class
     """
     self.pipeline = MyPipeline()
     self.pipeline.input = 'dummy_input'
     self.pipeline.output = 'dummy_output'
     self.output_directory = tempfile.mkdtemp()
     self.study_config = StudyConfig(output_directory=self.output_directory)
Code example #5
File: test_qc_nodes.py Project: servoz/capsul
class TestQCNodes(unittest.TestCase):
    """ Test pipeline node types.
    """

    def setUp(self):
        """ Initialize the TestQCNodes class
        """
        self.pipeline = MyPipeline()
        self.pipeline.input = 'dummy_input'
        self.pipeline.output = 'dummy_output'
        self.output_directory = tempfile.mkdtemp()
        self.study_config = StudyConfig(output_directory=self.output_directory)
        self.pipeline.set_study_config(self.study_config)

    def tearDown(self):
        """ Remove temporary items.
        """
        shutil.rmtree(self.output_directory)

    def test_qc_active(self):
        """ Method to test if the run qc option works properly.
        """
        # Execute all the pipeline nodes
        self.study_config.run(self.pipeline, execute_qc_nodes=True)

        # Get the list of all the nodes that have been executed
        execution_list = self.pipeline.workflow_ordered_nodes()

        # Go through all the executed nodes
        for process_node in execution_list:

            # Get the process instance that has been executed
            process_instance = process_node.process

            # Check that the node has been executed
            self.assertEqual(process_instance.log_file, "in")

    def test_qc_inactive(self):
        """ Method to test if the run qc option works properly.
        """
        # Execute all the pipeline nodes
        self.study_config.run(self.pipeline, execute_qc_nodes=False)

        # Get the list of all the nodes
        execution_list = self.pipeline.workflow_ordered_nodes()

        # Go through all the nodes
        for process_node in execution_list:

            # Get the process instance that may have been executed
            process_instance = process_node.process

            # Check that view nodes are not executed
            if process_node.node_type == "view_node":
                self.assertEqual(process_instance.log_file, None)
            else:
                self.assertEqual(process_instance.log_file, "in")
Code example #6
 def test_loo_py_io(self):
     sc = StudyConfig()
     pipeline = sc.get_process_instance(PipelineLOO)
     py_file = tempfile.mkstemp(suffix='_capsul.py')
     pyfname = py_file[1]
     os.close(py_file[0])
     self.temp_files.append(pyfname)
     python_export.save_py_pipeline(pipeline, pyfname)
     pipeline2 = sc.get_process_instance(pyfname)
     self._test_loo_pipeline(pipeline2)
Code example #7
File: test_custom_nodes.py Project: servoz/capsul
 def test_custom_nodes_py_io(self):
     sc = StudyConfig()
     pipeline = sc.get_process_instance(Pipeline1)
     py_file = tempfile.mkstemp(suffix='_capsul.py')
     pyfname = py_file[1]
     os.close(py_file[0])
     self.add_py_tmpfile(pyfname)
     python_export.save_py_pipeline(pipeline, pyfname)
     pipeline2 = sc.get_process_instance(pyfname)
     self._test_custom_nodes(pipeline2)
Code example #8
 def test_loo_xml_io(self):
     sc = StudyConfig()
     pipeline = sc.get_process_instance(PipelineLOO)
     xml_file = tempfile.mkstemp(suffix='_capsul.xml')
     xmlfname = xml_file[1]
     os.close(xml_file[0])
     self.temp_files.append(xmlfname)
     xml.save_xml_pipeline(pipeline, xmlfname)
     pipeline2 = sc.get_process_instance(xmlfname)
     self._test_loo_pipeline(pipeline2)
Code example #9
File: test_fom_process.py Project: servoz/capsul
def init_study_config(init_config={}):
    study_config = StudyConfig('test_study',
                               modules=['FomConfig', 'SomaWorkflowConfig'],
                               init_config=init_config)
    study_config.input_directory = '/tmp/in'
    study_config.output_directory = '/tmp/out'
    study_config.attributes_schema_paths.append(
        'capsul.attributes.test.test_attributed_process')

    return study_config
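
A possible usage sketch for the helper above: it instantiates a process through the returned StudyConfig and inspects its completion attributes. The DummyProcess path below is an assumption about what capsul.attributes.test.test_attributed_process exposes, and the import path follows the Capsul 2.x layout.

from capsul.attributes.completion_engine import ProcessCompletionEngine

study_config = init_study_config()
# the process path below is illustrative, not guaranteed to exist
process = study_config.get_process_instance(
    'capsul.attributes.test.test_attributed_process.DummyProcess')
completion_engine = ProcessCompletionEngine.get_completion_engine(process)
attributes = completion_engine.get_attribute_values()
print(attributes.export_to_dict())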
Code example #10
File: test_qc_nodes.py Project: cedrixic/capsul
class TestQCNodes(unittest.TestCase):
    """ Test pipeline node types.
    """

    def setUp(self):
        """ Initialize the TestQCNodes class
        """
        self.pipeline = MyPipeline()
        self.output_directory = tempfile.mkdtemp()
        self.study_config = StudyConfig(output_directory=self.output_directory)

    def __del__(self):
        """ Remove temporary items.
        """
        shutil.rmtree(self.output_directory)

    def test_qc_active(self):
        """ Method to test if the run qc option works properly.
        """
        # Execute all the pipeline nodes
        self.study_config.run(self.pipeline, executer_qc_nodes=True)

        # Get the list of all the nodes that have been executed
        execution_list = self.pipeline.workflow_ordered_nodes()

        # Go through all the executed nodes
        for process_node in execution_list:

            # Get the process instance that has been executed
            process_instance = process_node.process

            # Check that the node has been executed
            self.assertEqual(process_instance.log_file, "in")

    def test_qc_inactive(self):
        """ Method to test if the run qc option works properly.
        """
        # Execute all the pipeline nodes
        self.study_config.run(self.pipeline, executer_qc_nodes=False)

        # Get the list of all the nodes
        execution_list = self.pipeline.workflow_ordered_nodes()

        # Go through all the nodes
        for process_node in execution_list:

            # Get the process instance that may have been executed
            process_instance = process_node.process

            # Check that view nodes are not executed
            if process_node.node_type == "view_node":
                self.assertEqual(process_instance.log_file, None)
            else:
                self.assertEqual(process_instance.log_file, "in")
Code example #11
 def test_custom_nodes_workflow(self):
     sc = StudyConfig()
     pipeline = sc.get_process_instance(Pipeline1)
     pipeline.main_input = '/dir/file'
     pipeline.output_directory = '/dir/out_dir'
     wf = pipeline_workflow.workflow_from_pipeline(pipeline,
                                                   create_directories=False)
     self.assertEqual(len(wf.jobs), 3)
     self.assertEqual(len(wf.dependencies), 2)
     self.assertEqual(
         sorted([[x.name for x in d] for d in wf.dependencies]),
         sorted([['train1', 'train2'], ['train2', 'test']]))
Code example #12
File: test_custom_nodes.py Project: servoz/capsul
 def test_custom_nodes_workflow(self):
     sc = StudyConfig()
     pipeline = sc.get_process_instance(Pipeline1)
     pipeline.main_input = os.path.join(self.temp_dir, 'file')
     pipeline.output_directory = os.path.join(self.temp_dir, 'out_dir')
     wf = pipeline_workflow.workflow_from_pipeline(pipeline,
                                                   create_directories=False)
     self.assertEqual(len(wf.jobs), 7)
     self.assertEqual(len(wf.dependencies), 6)
     self.assertEqual(
         sorted([[x.name for x in d] for d in wf.dependencies]),
         sorted([['LOO', 'train1'], ['train1', 'train2'],
                 ['train1', 'intermediate_output'], ['train2', 'test'],
                 ['train2', 'output_file'], ['test', 'test_output']]))
Code example #13
class TestSomaWorkflow(unittest.TestCase):
    def setUp(self):
        default_config = SortedDictionary(("use_soma_workflow", True), )
        self.study_config = StudyConfig(init_config=default_config)
        self.atomic_pipeline = MyAtomicPipeline()
        self.composite_pipeline = MyCompositePipeline()

    def tearDown(self):
        swm = self.study_config.modules['SomaWorkflowConfig']
        swc = swm.get_workflow_controller()
        if swc is not None:
            # stop workflow controller and wait for thread termination
            swc.stop_engine()

    def test_atomic_dependencies(self):
        workflow = workflow_from_pipeline(self.atomic_pipeline)
        dependencies = [(x.name, y.name) for x, y in workflow.dependencies]
        self.assertTrue(len(dependencies) == 4)
        self.assertTrue(("node1", "node2") in dependencies)
        self.assertTrue(("node1", "node3") in dependencies)
        self.assertTrue(("node2", "node4") in dependencies)
        self.assertTrue(("node3", "node4") in dependencies)
        self.assertEqual(workflow.groups, [])

    def test_atomic_execution(self):
        self.atomic_pipeline.workflow_ordered_nodes()
        self.assertIn(
            self.atomic_pipeline.workflow_repr,
            ('node1->node3->node2->node4', 'node1->node2->node3->node4'))
        self.study_config.run(self.atomic_pipeline)

    def test_composite_dependencies(self):
        workflow = workflow_from_pipeline(self.composite_pipeline)
        dependencies = [(x.name, y.name) for x, y in workflow.dependencies]
        self.assertTrue(len(dependencies) == 8)
        self.assertEqual(dependencies.count(("node1", "node2")), 1)
        self.assertEqual(dependencies.count(("node1", "node3")), 2)
        self.assertEqual(dependencies.count(("node2", "node4")), 1)
        self.assertEqual(dependencies.count(("node3", "node4")), 2)
        self.assertEqual(dependencies.count(("node1", "node1")), 1)
        self.assertEqual(dependencies.count(("node4", "node4")), 1)
        self.assertTrue(len(workflow.groups) == 1)

    def test_composite_execution(self):
        self.composite_pipeline.workflow_ordered_nodes()
        self.assertTrue(self.composite_pipeline.workflow_repr in (
            "node1->node3->node2->node4", "node1->node2->node3->node4"))
        self.study_config.run(self.composite_pipeline)
Code example #14
    def display_parameters(self, item):
        """ This method was used to display the parameters of a process.
        It will be useful to generate processes in the near future.
        """

        study_config = StudyConfig(modules=StudyConfig.default_modules +
                                   ['NipypeConfig'])

        process_name = item.text(0)
        list_path = []
        while item is not self.process_library.topLevelItem(0):
            item = item.parent()
            list_path.append(item.text(0))

        list_path = list(reversed(list_path))
        package_name = '.'.join(list_path)

        __import__(package_name)
        pkg = sys.modules[package_name]

        for k, v in sorted(list(pkg.__dict__.items())):
            if k == process_name:
                try:
                    process = get_process_instance(v)
                except Exception:
                    print('AIEEEE')
                else:
                    print(process.get_inputs())
                txt = "Inputs: \n" + str(v.input_spec())
                txt2 = "\nOutputs: \n" + str(v.output_spec())
                self.label_test.setText(txt + txt2)
Code example #15
File: test_soma_workflow.py Project: M40V/capsul
 def setUp(self):
     default_config = SortedDictionary(("use_soma_workflow", True))
     # use a custom temporary soma-workflow dir to avoid concurrent
     # access problems
     tmpdb = tempfile.mkstemp('', prefix='soma_workflow')
     os.close(tmpdb[0])
     os.unlink(tmpdb[1])
     self.soma_workflow_temp_dir = tmpdb[1]
     os.mkdir(self.soma_workflow_temp_dir)
     swf_conf = '[%s]\nSOMA_WORKFLOW_DIR = %s\n' \
         % (socket.gethostname(), tmpdb[1])
     swconfig.Configuration.search_config_path \
         = staticmethod(lambda : StringIO.StringIO(swf_conf))
     self.study_config = StudyConfig(init_config=default_config)
     self.atomic_pipeline = MyAtomicPipeline()
     self.composite_pipeline = MyCompositePipeline()
Code example #16
    def setUp(self):
        tmpout = tempfile.mkdtemp(prefix='capsul_ex_test_')

        self.work_dir = tmpout
        print('working dir:', tmpout)
        self.input = os.path.join(self.work_dir, 'input_data')

        stdout = open(os.path.join(self.work_dir, 'stdout'), 'w')
        subprocess.check_call(['generate_data', self.input],
                              stdout=stdout, stderr=stdout)
        stdout.close()
        self.output = os.path.join(self.work_dir, 'output_data')

        study_config = StudyConfig(modules=['SomaWorkflowConfig',
                                            'AttributesConfig'])
        study_config.input_directory = self.input
        study_config.output_directory = os.path.join(
            self.work_dir, 'output_data')
        study_config.shared_directory = os.path.join(self.input, 'share')
        study_config.somaworkflow_computing_resource = 'localhost'
        study_config.somaworkflow_computing_resources_config.localhost = {
            'transfer_paths': [],
        }
        study_config.attributes_schema_paths += [
            'bv_capsul_ex.adhoc_completion',
            'bv_capsul_ex.tests.test_ex_pipeline',
            'bv_capsul_ex.schema']
        study_config.process_completion = 'bv_capsul_ex'
        study_config.attributes_schemas = {'input': 'bv_capsul_ex',
                                           'output': 'bv_capsul_ex',
                                           'shared': 'bv_capsul_shared'}
        self.study_config = study_config
Code example #17
File: test_custom_nodes.py Project: servoz/capsul
 def test_mapreduce(self):
     sc = StudyConfig()
     pipeline = sc.get_process_instance(PipelineMapReduce)
     pipeline.main_inputs = [
         os.path.join(self.temp_dir, 'file%d' % i) for i in range(4)
     ]
     pipeline.subjects = ['Robert', 'Gustave']
     pipeline.output_directory = os.path.join(self.temp_dir, 'out_dir')
     self.assertEqual(pipeline.nodes['cat'].process.files, [
         os.path.join(pipeline.output_directory,
                      '%s_test_output' % pipeline.subjects[0]),
         os.path.join(pipeline.output_directory,
                      '%s_test_output' % pipeline.subjects[1])
     ])
     wf = pipeline_workflow.workflow_from_pipeline(pipeline,
                                                   create_directories=False)
     self.assertEqual(len(wf.jobs), 19)
     #print(sorted([(d[0].name, d[1].name) for d in wf.dependencies]))
     self.assertEqual(len(wf.dependencies), 28)
Code example #18
def init_study_config(init_config={}):
    study_config = StudyConfig('test_study',
                               modules=['AttributesConfig',
                                        'SomaWorkflowConfig'],
                               init_config=init_config)
    study_config.input_directory = '/tmp/in'
    study_config.output_directory = '/tmp/out'
    study_config.attributes_schema_paths \
        = study_config.attributes_schema_paths \
            + ['capsul.attributes.test.test_attributed_process']
    study_config.attributes_schemas['input'] = 'custom_ex'
    study_config.attributes_schemas['output'] = 'custom_ex'
    #print('attributes_schema_paths:', study_config.attributes_schema_paths)
    study_config.path_completion = 'custom_ex'
    #print('attributes_schema_paths 2:', study_config.attributes_schema_paths)

    return study_config
Code example #19
 def setUp(self):
     default_config = SortedDictionary(
         ("use_soma_workflow", True)
     )
     # use a custom temporary soma-workflow dir to avoid concurrent
     # access problems
     tmpdb = tempfile.mkstemp('', prefix='soma_workflow')
     os.close(tmpdb[0])
     os.unlink(tmpdb[1])
     self.soma_workflow_temp_dir = tmpdb[1]
     os.mkdir(self.soma_workflow_temp_dir)
     swf_conf = StringIO.StringIO('[%s]\nSOMA_WORKFLOW_DIR = %s\n' \
         % (socket.gethostname(), tmpdb[1]))
     swconfig.Configuration.search_config_path \
         = staticmethod(lambda : swf_conf)
     self.study_config = StudyConfig(init_config=default_config)
     self.atomic_pipeline = MyAtomicPipeline()
     self.composite_pipeline = MyCompositePipeline()
Code example #20
def init_study_config(init_config={}):
    study_config = StudyConfig('test_study',
                               modules=['AttributesConfig',
                                        'SomaWorkflowConfig'],
                               init_config=init_config)
    study_config.input_directory = '/tmp/in'
    study_config.output_directory = '/tmp/out'
    study_config.attributes_schema_paths.append(
        'capsul.attributes.test.test_attributed_process')
    study_config.attributes_schemas['input'] = 'custom_ex'
    study_config.attributes_schemas['output'] = 'custom_ex'
    study_config.path_completion = 'custom_ex'

    return study_config
Code example #21

def check_call(study_config, batch_file, **kwargs):
    '''
    Equivalent to Python soma.subprocess.check_call for SPM batch
    '''
    check_spm_configuration(study_config)
    cmd = spm_command(study_config, batch_file)
    return soma.subprocess.check_call(cmd, **kwargs)


def check_output(study_config, batch_file, **kwargs):
    '''
    Equivalent to Python soma.subprocess.check_output for SPM batch
    '''
    check_spm_configuration(study_config)
    cmd = spm_command(study_config, batch_file)
    return soma.subprocess.check_output(cmd, **kwargs)


if __name__ == '__main__':
    from capsul.api import StudyConfig
    from capsul.soma.subprocess.spm import check_call as call_spm
    import tempfile

    sc = StudyConfig(spm_directory='/home/yc176684/spm12-standalone-7219')
    batch = tempfile.NamedTemporaryFile(suffix='.m', mode='w')
    batch.write("fprintf(1, '%s', spm('dir'));")
    batch.flush()
    call_spm(sc, batch.name)
Code example #22
def main():
    ''' Run the :mod:`capsul.process.runprocess` module as a commandline
    '''

    usage = '''Usage: python -m capsul [options] processname [arg1] [arg2] ...
    [argx=valuex] [argy=valuey] ...

    Example:
    python -m capsul threshold ~/data/irm.ima /tmp/th.nii threshold1=80

    Named arguments (in the shape argx=valuex) may address sub-processes of a
    pipeline, using the dot separator:

    PrepareSubject.t1mri=/home/myself/mymri.nii

    For a more precise description, please look at the web documentation:
    http://brainvisa.info/capsul/user_doc/user_guide_tree/index.html
    '''

    # Set up logging on stderr. This must be called before any logging takes
    # place, to avoid "No handlers could be found for logger" errors.
    logging.basicConfig()

    parser = OptionParser(description='Run a single CAPSUL process',
                          usage=usage)
    group1 = OptionGroup(
        parser,
        'Config',
        description='Processing configuration, database options')
    group1.add_option(
        '--studyconfig',
        dest='studyconfig',
        help='load StudyConfig configuration from the given file (JSON)')
    group1.add_option('-i',
                      '--input',
                      dest='input_directory',
                      help='input data directory (if not specified in '
                      'studyconfig file). If not specified neither on the '
                      'commandline nor study configfile, taken as the same as '
                      'output.')
    group1.add_option('-o',
                      '--output',
                      dest='output_directory',
                      help='output data directory (if not specified in '
                      'studyconfig file). If not specified neither on the '
                      'commandline nor study configfile, taken as the same as '
                      'input.')
    parser.add_option_group(group1)

    group2 = OptionGroup(
        parser,
        'Processing',
        description='Processing options, distributed execution')
    group2.add_option('--swf',
                      '--soma_workflow',
                      dest='soma_workflow',
                      default=False,
                      action='store_true',
                      help='use soma_workflow. Soma-Workflow '
                      'configuration has to be setup and valid for non-local '
                      'execution, and additional file transfer options '
                      'may be used. The default is *not* to use SWF and '
                      'process mono-processor, sequential execution.')
    group2.add_option('-r',
                      '--resource_id',
                      dest='resource_id',
                      default=None,
                      help='soma-workflow resource ID, defaults to localhost')
    group2.add_option('-p',
                      '--password',
                      dest='password',
                      default=None,
                      help='password to access the remote computing resource. '
                      'Do not specify it if using a ssh key')
    group2.add_option('--rsa-pass',
                      dest='rsa_key_pass',
                      default=None,
                      help='RSA key password, for ssh key access')
    group2.add_option('--queue',
                      dest='queue',
                      default=None,
                      help='Queue to use on the computing resource. If not '
                      'specified, use the default queue.')
    #group2.add_option('--input-processing', dest='input_file_processing',
    #default=None, help='Input files processing: local_path, '
    #'transfer, translate, or translate_shared. The default is '
    #'local_path if the computing resource is the localhost, or '
    #'translate_shared otherwise.')
    #group2.add_option('--output-processing', dest='output_file_processing',
    #default=None, help='Output files processing: local_path, '
    #'transfer, or translate. The default is local_path.')
    group2.add_option('--keep-succeeded-workflow',
                      dest='keep_succeded_workflow',
                      action='store_true',
                      default=False,
                      help='keep the workflow in the computing resource '
                      'database after execution. By default it is removed.')
    group2.add_option('--delete-failed-workflow',
                      dest='delete_failed_workflow',
                      action='store_true',
                      default=False,
                      help='delete the workflow in the computing resource '
                      'database after execution, if it has failed. By default '
                      'it is kept.')
    parser.add_option_group(group2)

    group3 = OptionGroup(parser, 'Iteration', description='Iteration')
    group3.add_option('-I',
                      '--iterate',
                      dest='iterate_on',
                      action='append',
                      help='Iterate the given process, iterating over the '
                      'given parameter(s). Multiple parameters may be '
                      'iterated jointly using several -I options. In the '
                      'process parameters, values are replaced by lists, all '
                      'iterated lists should have the same size.\n'
                      'Ex:\n'
                      'python -m capsul -I par_a -I par_c a_process '
                      'par_a="[1, 2]" par_b="something" '
                      'par_c="[\\"one\\", \\"two\\"]"')
    parser.add_option_group(group3)

    group4 = OptionGroup(parser, 'Attributes completion')
    group4.add_option('-a',
                      '--attribute',
                      dest='attributes',
                      action='append',
                      default=[],
                      help='set completion (including FOM) attribute. '
                      'Syntax: attribute=value, value the same syntax as '
                      'process parameters (python syntax for lists, for '
                      'instance), with proper quotes if needed for shell '
                      'escaping.\n'
                      'Ex: -a acquisition="default" '
                      '-a subject=\'["s1", "s2"]\'')
    parser.add_option_group(group4)

    group5 = OptionGroup(parser,
                         'Help',
                         description='Help and documentation options')
    group5.add_option('--process-help',
                      dest='process_help',
                      action='store_true',
                      default=False,
                      help='display specified process help')
    parser.add_option_group(group5)

    parser.disable_interspersed_args()
    (options, args) = parser.parse_args()

    if options.studyconfig:
        study_config = StudyConfig(modules=StudyConfig.default_modules +
                                   ['FomConfig', 'BrainVISAConfig'])
        if yaml:
            scdict = yaml.load(open(options.studyconfig))
        else:
            scdict = json.load(open(options.studyconfig))
        study_config.set_study_configuration(scdict)
    else:
        study_config = StudyConfig()
        study_config.read_configuration()

    if options.input_directory:
        study_config.input_directory = options.input_directory
    if options.output_directory:
        study_config.output_directory = options.output_directory
    if study_config.output_directory in (None, Undefined) \
            and study_config.input_directory not in (None, Undefined):
        study_config.output_directory = study_config.input_directory
    if study_config.input_directory in (None, Undefined) \
            and study_config.output_directory not in (None, Undefined):
        study_config.input_directory = study_config.output_directory
    study_config.somaworkflow_keep_succeeded_workflows \
        = options.keep_succeded_workflow
    study_config.somaworkflow_keep_failed_workflows \
        = not options.delete_failed_workflow

    kwre = re.compile(r'([a-zA-Z_](\.?[a-zA-Z0-9_])*)\s*=\s*(.*)$')

    attributes = {}
    for att in options.attributes:
        m = kwre.match(att)
        if m is None:
            raise SyntaxError('syntax error in attribute definition: %s' % att)
        attributes[m.group(1)] = convert_commandline_parameter(m.group(3))

    args = tuple((convert_commandline_parameter(i) for i in args))
    kwargs = {}
    todel = []
    for arg in args:
        if isinstance(arg, six.string_types):
            m = kwre.match(arg)
            if m is not None:
                kwargs[m.group(1)] = convert_commandline_parameter(m.group(3))
                todel.append(arg)
    args = [arg for arg in args if arg not in todel]

    if not args:
        parser.print_usage()
        sys.exit(2)

    # get the main process
    process_name = args[0]
    args = args[1:]

    iterated = options.iterate_on
    try:
        process = get_process_with_params(process_name, study_config, iterated,
                                          attributes, *args, **kwargs)
    except ProcessParamError as e:
        print("error: {0}".format(e), file=sys.stderr)
        sys.exit(1)

    if options.process_help:
        process.help()

        print()

        completion_engine \
            = ProcessCompletionEngine.get_completion_engine(process)
        attribs = completion_engine.get_attribute_values()
        aval = attribs.export_to_dict()
        print('Completion attributes:')
        print('----------------------')
        print()
        print('(note: may differ depending on study config file contents, '
              'completion rules (FOM)...)')
        print()

        skipped = set(['generated_by_parameter', 'generated_by_process'])
        for name, value in six.iteritems(aval):
            if name in skipped:
                continue
            ttype = attribs.trait(name).trait_type.__class__.__name__
            if isinstance(attribs.trait(name).trait_type, List):
                ttype += '(%s)' \
                    % attribs.trait(name).inner_traits[
                        0].trait_type.__class__.__name__
            print('%s:' % name, ttype)
            if value not in (None, Undefined):
                print('   ', value)

        print()
        del aval, attribs, completion_engine, process
        sys.exit(0)

    resource_id = options.resource_id
    password = options.password
    rsa_key_pass = options.rsa_key_pass
    queue = options.queue
    file_processing = []

    study_config.use_soma_workflow = options.soma_workflow

    if options.soma_workflow:
        file_processing = [None, None]

    else:
        file_processing = [None, None]

    res = run_process_with_distribution(
        study_config,
        process,
        options.soma_workflow,
        resource_id=resource_id,
        password=password,
        rsa_key_pass=rsa_key_pass,
        queue=queue,
        input_file_processing=file_processing[0],
        output_file_processing=file_processing[1])

    sys.exit(0)
Code example #23
class TestSomaWorkflow(unittest.TestCase):

    def setUp(self):
        default_config = SortedDictionary(
            ("use_soma_workflow", True)
        )
        self.study_config = StudyConfig(init_config=default_config)
        self.atomic_pipeline = MyAtomicPipeline()
        self.composite_pipeline = MyCompositePipeline()

    def tearDown(self):
        swm = self.study_config.modules['SomaWorkflowConfig']
        swc = swm.get_workflow_controller()
        if swc is not None:
            # stop workflow controller and wait for thread termination
            swc.stop_engine()

    def test_atomic_dependencies(self):
        workflow = workflow_from_pipeline(self.atomic_pipeline)
        dependencies = [(x.name, y.name) for x, y in workflow.dependencies]
        self.assertTrue(len(dependencies) == 4)
        self.assertTrue(("node1", "node2") in dependencies)
        self.assertTrue(("node1", "node3") in dependencies)
        self.assertTrue(("node2", "node4") in dependencies)
        self.assertTrue(("node3", "node4") in dependencies)
        self.assertEqual(workflow.groups, [])

    def test_atomic_execution(self):
        self.atomic_pipeline.workflow_ordered_nodes()
        self.assertIn(self.atomic_pipeline.workflow_repr,
                      ('node1->node3->node2->node4',
                       'node1->node2->node3->node4'))
        tmp1 = tempfile.mkstemp('', prefix='capsul_swf')
        os.write(tmp1[0], 'bidibidi'.encode())
        os.close(tmp1[0])
        tmp2 = tempfile.mkstemp('', prefix='capsul_swf_out')
        os.close(tmp2[0])
        os.unlink(tmp2[1])
        try:
            self.atomic_pipeline.input_image = tmp1[1]
            self.atomic_pipeline.output_image = tmp2[1]
            self.study_config.run(self.atomic_pipeline)
            self.assertTrue(os.path.exists(tmp2[1]))
            with open(tmp2[1]) as f:
                content = f.read()
            self.assertEqual(content, 'bidibidibidibidi')
        finally:
            if os.path.exists(tmp1[1]):
                os.unlink(tmp1[1])
            if os.path.exists(tmp2[1]):
                os.unlink(tmp2[1])

    def test_atomic_execution_kwparams(self):
        tmp1 = tempfile.mkstemp('', prefix='capsul_swf')
        os.write(tmp1[0], 'bidibidi'.encode())
        os.close(tmp1[0])
        tmp2 = tempfile.mkstemp('', prefix='capsul_swf_out')
        os.close(tmp2[0])
        os.unlink(tmp2[1])
        try:
            atomic_pipeline = MyAtomicPipeline()
            self.study_config.run(atomic_pipeline,
                                  input_image=tmp1[1], output_image=tmp2[1])
            self.assertTrue(os.path.exists(tmp2[1]))
            with open(tmp2[1]) as f:
                content = f.read()
            self.assertEqual(content, 'bidibidibidibidi')
        finally:
            if os.path.exists(tmp1[1]):
                os.unlink(tmp1[1])
            if os.path.exists(tmp2[1]):
                os.unlink(tmp2[1])

    def test_composite_dependencies(self):
        workflow = workflow_from_pipeline(self.composite_pipeline)
        dependencies = [(x.name, y.name) for x, y in workflow.dependencies]
        self.assertEqual(len(dependencies), 8)
        self.assertEqual(dependencies.count(("node1", "node2")), 1)
        self.assertEqual(dependencies.count(("node1", "node3")), 2)
        self.assertEqual(dependencies.count(("node2", "node4")), 1)
        self.assertEqual(dependencies.count(("node3", "node4")), 2)
        self.assertEqual(dependencies.count(("node1", "node1")), 1)
        self.assertEqual(dependencies.count(("node4", "node4")), 1)
        #self.assertEqual(dependencies.count(("node1", "node2_input")), 1)
        #self.assertEqual(dependencies.count(("node2_output", "node4")), 1)
        self.assertTrue(len(workflow.groups) == 1)

    def test_composite_execution(self):
        self.composite_pipeline.workflow_ordered_nodes()
        self.assertTrue(self.composite_pipeline.workflow_repr in
                        ("node1->node3->node2->node4",
                         "node1->node2->node3->node4"))
        tmp1 = tempfile.mkstemp('', prefix='capsul_swf')
        os.write(tmp1[0], 'bidibidi'.encode())
        os.close(tmp1[0])
        tmp2 = tempfile.mkstemp('', prefix='capsul_swf_out')
        os.close(tmp2[0])
        os.unlink(tmp2[1])
        try:
            self.composite_pipeline.input_image = tmp1[1]
            self.composite_pipeline.output_image = tmp2[1]
            self.study_config.run(self.composite_pipeline)
            self.assertTrue(os.path.exists(tmp2[1]))
            with open(tmp2[1]) as f:
                content = f.read()
            self.assertEqual(content, 'bidibidibidibidibidibidi')
        finally:
            if os.path.exists(tmp1[1]):
                os.unlink(tmp1[1])
            if os.path.exists(tmp2[1]):
                os.unlink(tmp2[1])
Code example #24
class TestPipeline(unittest.TestCase):
    """ Class to test a pipeline with an iterative node
    """
    def setUp(self):
        """ In the setup construct the pipeline and set some input parameters.
        """
        self.directory = tempfile.mkdtemp(prefix="capsul_test")

        self.study_config = StudyConfig()

        # Construct the pipeline
        self.pipeline = self.study_config.get_process_instance(MyPipeline)

        # Set some input parameters
        self.pipeline.input_image = [
            os.path.join(self.directory, "toto"),
            os.path.join(self.directory, "tutu")
        ]
        self.pipeline.dynamic_parameter = [3, 1]
        self.pipeline.other_input = 5

        # build a pipeline with dependencies
        self.small_pipeline \
            = self.study_config.get_process_instance(MySmallPipeline)
        self.small_pipeline.files_to_create = [
            os.path.join(self.directory, "toto"),
            os.path.join(self.directory, "tutu")
        ]
        self.small_pipeline.dynamic_parameter = [3, 1]
        self.small_pipeline.other_input = 5

        # build a bigger pipeline with several levels
        self.big_pipeline \
            = self.study_config.get_process_instance(MyBigPipeline)

    def tearDown(self):
        swm = self.study_config.modules['SomaWorkflowConfig']
        swc = swm.get_workflow_controller()
        if swc is not None:
            # stop workflow controller and wait for thread termination
            swc.stop_engine()
        if debug:
            print('directory %s not removed.' % self.directory)
        else:
            shutil.rmtree(self.directory)

    def test_iterative_pipeline_connection(self):
        """ Test if an iterative process works correctly
        """

        # create inputs
        for f in self.pipeline.input_image:
            with open(f, "w") as fobj:
                fobj.write("input: %s\n" % f)

        # Test the output connection
        self.pipeline()

        if sys.version_info >= (2, 7):
            self.assertIn(
                "toto-5.0-3.0",
                [os.path.basename(f) for f in self.pipeline.output_image])
            self.assertIn(
                "tutu-5.0-1.0",
                [os.path.basename(f) for f in self.pipeline.output_image])
        else:
            self.assertTrue(
                "toto-5.0-3.0" in
                [os.path.basename(f) for f in self.pipeline.output_image])
            self.assertTrue(
                "tutu-5.0-1.0" in
                [os.path.basename(f) for f in self.pipeline.output_image])
        self.assertEqual(
            self.pipeline.other_output,
            [self.pipeline.other_input, self.pipeline.other_input])

    def test_iterative_pipeline_workflow(self):
        self.small_pipeline.output_image = [
            os.path.join(self.directory, 'toto_out'),
            os.path.join(self.directory, 'tutu_out')
        ]
        self.small_pipeline.other_output = [1., 2.]
        workflow = pipeline_workflow.workflow_from_pipeline(
            self.small_pipeline)
        #expect 2 + 2 (iter) + 2 (barriers) jobs
        self.assertEqual(len(workflow.jobs), 6)
        # expect 6 dependencies:
        # init -> iterative input barrier
        # iterative output barrier -> end
        # iterative input barrier -> iterative jobs (2)
        # iterative jobs -> iterative output barrier (2)
        self.assertEqual(len(workflow.dependencies), 6)

    def test_iterative_big_pipeline_workflow(self):
        self.big_pipeline.files_to_create = [["toto", "tutu"],
                                             ["tata", "titi", "tete"]]
        self.big_pipeline.dynamic_parameter = [[1, 2], [3, 4, 5]]
        self.big_pipeline.other_input = 5
        self.big_pipeline.output_image = [
            [
                os.path.join(self.directory, 'toto_out'),
                os.path.join(self.directory, 'tutu_out')
            ],
            [
                os.path.join(self.directory, 'tata_out'),
                os.path.join(self.directory, 'titi_out'),
                os.path.join(self.directory, 'tete_out')
            ]
        ]
        self.big_pipeline.other_output = [[1.1, 2.1], [3.1, 4.1, 5.1]]
        workflow = pipeline_workflow.workflow_from_pipeline(self.big_pipeline)
        # expect 6 + 7 + 2 jobs
        self.assertEqual(len(workflow.jobs), 15)
        subjects = set()
        for job in workflow.jobs:
            if not job.name.startswith('DummyProcess') or '_map' in job.name \
                    or '_reduce' in job.name:
                continue
            param_dict = job.param_dict
            self.assertEqual(param_dict["other_input"], 5)
            subject = param_dict['input_image']
            subjects.add(subject)
            if sys.version_info >= (2, 7):
                self.assertIn(subject,
                              ["toto", "tutu", "tata", "titi", "tete"])
            else:
                self.assertTrue(
                    subject in ["toto", "tutu", "tata", "titi", "tete"])
        self.assertEqual(subjects,
                         set(["toto", "tutu", "tata", "titi", "tete"]))

    def test_iterative_pipeline_workflow_run(self):
        import soma_workflow.constants as swconstants
        import soma_workflow.client as swclient

        self.small_pipeline.output_image = [
            os.path.join(self.directory, 'toto_out'),
            os.path.join(self.directory, 'tutu_out')
        ]
        self.small_pipeline.other_output = [1., 2.]
        workflow = pipeline_workflow.workflow_from_pipeline(
            self.small_pipeline)
        swclient.Helper.serialize(
            os.path.join(self.directory, 'smallpipeline.workflow'), workflow)

        self.study_config.use_soma_workflow = True

        #controller = swclient.WorkflowController(config=config)
        #try:

        #wf_id = controller.submit_workflow(workflow)
        print('* running pipeline...')
        #swclient.Helper.wait_workflow(wf_id, controller)
        self.study_config.run(self.small_pipeline)
        print('* finished.')
        #workflow_status = controller.workflow_status(wf_id)
        #elements_status = controller.workflow_elements_status(wf_id)
        #failed_jobs = [element for element in elements_status[0] \
        #if element[1] != swconstants.DONE \
        #or element[3][0] != swconstants.FINISHED_REGULARLY]
        #if not debug:
        #controller.delete_workflow(wf_id)
        #self.assertTrue(workflow_status == swconstants.WORKFLOW_DONE,
        #'Workflow did not finish regularly: %s' % workflow_status)
        #self.assertTrue(len(failed_jobs) == 0, 'Jobs failed: %s'
        #% failed_jobs)
        # check output files contents
        for ifname, fname in zip(self.small_pipeline.files_to_create,
                                 self.small_pipeline.output_image):
            with open(fname) as f:
                content = f.read()
            self.assertEqual(content, "file: %s\n" % ifname)
Code example #25
File: test_qc_nodes.py Project: cedrixic/capsul
 def setUp(self):
     """ Initialize the TestQCNodes class
     """
     self.pipeline = MyPipeline()
     self.output_directory = tempfile.mkdtemp()
     self.study_config = StudyConfig(output_directory=self.output_directory)
Code example #26
 def test_leave_one_out_pipeline(self):
     sc = StudyConfig()
     pipeline = sc.get_process_instance(PipelineLOO)
     self._test_loo_pipeline(pipeline)
Code example #27
class TestSomaWorkflow(unittest.TestCase):

    def setUp(self):
        default_config = SortedDictionary(
            ("use_soma_workflow", True)
        )
        # use a custom temporary soma-workflow dir to avoid concurrent
        # access problems
        tmpdb = tempfile.mkstemp('', prefix='soma_workflow')
        os.close(tmpdb[0])
        os.unlink(tmpdb[1])
        self.soma_workflow_temp_dir = tmpdb[1]
        os.mkdir(self.soma_workflow_temp_dir)
        swf_conf = StringIO.StringIO('[%s]\nSOMA_WORKFLOW_DIR = %s\n' \
            % (socket.gethostname(), tmpdb[1]))
        swconfig.Configuration.search_config_path \
            = staticmethod(lambda : swf_conf)
        self.study_config = StudyConfig(init_config=default_config)
        self.atomic_pipeline = MyAtomicPipeline()
        self.composite_pipeline = MyCompositePipeline()

    def tearDown(self):
        shutil.rmtree(self.soma_workflow_temp_dir)

    def test_atomic_dependencies(self):
        workflow = workflow_from_pipeline(self.atomic_pipeline)
        dependencies = [(x.name, y.name) for x, y in workflow.dependencies]
        self.assertTrue(len(dependencies) == 4)
        self.assertTrue(("node1", "node2") in dependencies)
        self.assertTrue(("node1", "node3") in dependencies)
        self.assertTrue(("node2", "node4") in dependencies)
        self.assertTrue(("node3", "node4") in dependencies)
        self.assertEqual(workflow.groups, [])

    def test_atomic_execution(self):
        self.atomic_pipeline.workflow_ordered_nodes()
        if sys.version_info >= (2, 7):
            self.assertIn(self.atomic_pipeline.workflow_repr,
                          ('node1->node3->node2->node4',
                           'node1->node2->node3->node4'))
        else: # python 2.6 unittest does not have assertIn()
            self.assertTrue(self.atomic_pipeline.workflow_repr in \
                ('node1->node3->node2->node4',
                'node1->node2->node3->node4'))
        tmp1 = tempfile.mkstemp('', prefix='capsul_swf')
        os.write(tmp1[0], 'bidibidi'.encode())
        os.close(tmp1[0])
        tmp2 = tempfile.mkstemp('', prefix='capsul_swf_out')
        os.close(tmp2[0])
        os.unlink(tmp2[1])
        self.atomic_pipeline.input_image = tmp1[1]
        self.atomic_pipeline.output_image = tmp2[1]
        self.study_config.run(self.atomic_pipeline)
        self.assertTrue(os.path.exists(tmp2[1]))
        content = open(tmp2[1]).read()
        self.assertEqual(content, 'bidibidibidibidi')
        os.unlink(tmp1[1])
        os.unlink(tmp2[1])

    def test_composite_dependencies(self):
        workflow = workflow_from_pipeline(self.composite_pipeline)
        dependencies = [(x.name, y.name) for x, y in workflow.dependencies]
        self.assertTrue(len(dependencies) == 16)
        self.assertEqual(dependencies.count(("node1", "node2")), 1)
        self.assertEqual(dependencies.count(("node1", "node3")), 2)
        self.assertEqual(dependencies.count(("node2", "node4")), 1)
        self.assertEqual(dependencies.count(("node3", "node4")), 2)
        self.assertEqual(dependencies.count(("node1", "node2_input")), 1)
        self.assertEqual(dependencies.count(("node2_output", "node4")), 1)
        self.assertTrue(len(workflow.groups) == 1)

    def test_composite_execution(self):
        self.composite_pipeline.workflow_ordered_nodes()
        self.assertTrue(self.composite_pipeline.workflow_repr in
                        ("node1->node3->node2->node4",
                         "node1->node2->node3->node4"))
        tmp1 = tempfile.mkstemp('', prefix='capsul_swf')
        os.write(tmp1[0], 'bidibidi'.encode())
        os.close(tmp1[0])
        tmp2 = tempfile.mkstemp('', prefix='capsul_swf_out')
        os.close(tmp2[0])
        os.unlink(tmp2[1])
        self.composite_pipeline.input_image = tmp1[1]
        self.composite_pipeline.output_image = tmp2[1]
        self.study_config.run(self.composite_pipeline)
        self.assertTrue(os.path.exists(tmp2[1]))
        content = open(tmp2[1]).read()
        self.assertEqual(content, 'bidibidibidibidibidibidi')
        os.unlink(tmp1[1])
        os.unlink(tmp2[1])
Code example #28
 def test_custom_nodes(self):
     sc = StudyConfig()
     pipeline = sc.get_process_instance(Pipeline1)
     self._test_custom_nodes(pipeline)
Code example #29
File: capsul_pipeline_view.py Project: servoz/capsul
pipelines = find_pipeline_and_process(
    os.path.basename(options.module))["pipeline_descs"]
logger.info("Found '{0}' pipeline(s) in '{1}'.".format(
    len(pipelines), options.module))

# Sort pipelines processes
# From each pipeline's full path 'm1.m2.pipeline' get its module name 'm2'
module_names = set([x.split(".")[1] for x in pipelines])
# Sort each pipeline according to its module name.
# The result is a dict of the form 'd[m2] = [pipeline1, pipeline2, ...]'.
sorted_pipelines = dict((x, []) for x in module_names)
for pipeline in pipelines:
    module_name = pipeline.split(".")[1]
    sorted_pipelines[module_name].append(pipeline)

study_config = StudyConfig(modules=StudyConfig.default_modules + ['FomConfig'])

# Generate a png representation of each pipeline.
for module_name, module_pipelines in sorted_pipelines.items():

    # this docwriter is just used to manage short names
    docwriter = PipelineHelpWriter([], short_names=short_names)

    # Where the documentation will be written: a relative path from the
    # makefile
    short_name = docwriter.get_short_name(module_name)
    outdir = os.path.join(base_outdir, short_name,  "schema")
    if not os.path.isdir(outdir):
        os.makedirs(outdir)

    # Go through all pipelines